+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.RHPKf7u84H --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest . --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
Warn[-WPluginErr]: in $B/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium: Requirement ram is redefined 16 -> 28
Warn[-WPluginErr]: in $B/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch: Requirement ram is redefined 16 -> 28
Configuring dependencies for platform test_tool_tc1-global
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 2.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration
| 5.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a
| 5.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a
| 5.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a
| 5.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a
| 5.9%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a
| 6.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a
| 6.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a
| 6.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a
| 6.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a
| 6.2%| PREPARE $(VCS)
| 6.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a
| 6.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a
| 6.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a
| 6.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a
| 7.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a
| 7.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a
| 7.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a
| 7.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a
| 8.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a
|10.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a
|11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a
|12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a
|12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a
|12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a
|12.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a
|12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a
|12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a
|12.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a
|13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a
|13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a
|13.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a
|13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a
|13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a
|13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a
|13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a
|13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a
|13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a
|13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a
|13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a
|13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a
|13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a
|13.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a
|13.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a
|13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a
|13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a
|13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a
|13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a
|13.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a
|14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a
|14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a
|14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a
|14.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a
|14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a
|14.7%| PREPARE $(YMAKE_PYTHON3-212672652) - 8.40 MB
|14.9%| [AR] {BAZEL_DOWNLOAD}
$(B)/yt/yt/library/erasure/libyt-library-erasure.a |15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |15.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |16.9%| PREPARE $(LLD_ROOT-3107549726) - 33.01 MB |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |17.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |19.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |19.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |19.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |19.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/security_client/access_control.cpp |19.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |19.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |19.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |19.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |19.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |20.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |20.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |20.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |20.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |20.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |20.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |21.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |21.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |21.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |21.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |21.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |21.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |21.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |21.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |21.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |21.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |21.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |21.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |21.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |21.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |22.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |22.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |22.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |22.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |22.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |22.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |22.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |22.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |23.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |23.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |23.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |23.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |23.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |23.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |23.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |23.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |23.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |23.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |23.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |23.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |24.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |24.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |24.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |24.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |24.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |24.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |24.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |24.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |24.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |24.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |24.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |25.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |25.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |25.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |26.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |26.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |25.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |25.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |26.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |26.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |26.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |26.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |26.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |27.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |27.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |27.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |27.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |27.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |27.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |27.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |27.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |27.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |28.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |28.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |28.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |28.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |28.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |28.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |28.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |28.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |28.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |28.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |28.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |28.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |29.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |29.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |29.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |29.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |28.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |28.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |29.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |29.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |29.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |29.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |29.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |29.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |29.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |29.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |30.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |30.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |30.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |30.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |30.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |30.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |30.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |30.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |30.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |30.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |30.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |30.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |30.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |30.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |30.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |30.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |30.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |31.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |31.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |31.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |31.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |31.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |31.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |31.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |31.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |31.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |31.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/security_client/public.cpp |31.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |32.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |32.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |32.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |32.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |32.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |32.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |32.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |32.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |32.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |32.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |33.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |33.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |33.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |33.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |33.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |33.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |33.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |33.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |33.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |34.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |34.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |34.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |34.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |34.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |34.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |34.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |34.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |34.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |34.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |35.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/executor/libclient-impl-executor.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |35.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |35.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |36.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |37.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |37.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |39.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |39.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |40.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |41.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |43.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a 
|45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |46.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |48.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |50.7%| PREPARE $(CLANG_FORMAT-2963054096) |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |50.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |51.1%| PREPARE $(RUFF-3583455953) - 8.40 MB |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |51.3%| PREPARE $(FLAKE8_PY3-3596799299) - 8.40 MB |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |51.7%| PREPARE $(TEST_TOOL_HOST-sbr:9985353571) - 33.57 MB |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |51.8%| PREPARE $(PYTHON) - 50.35 MB |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |52.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/third_party/utf8_range/libprotobuf-third_party-utf8_range.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |52.5%| PREPARE $(CLANG-359471697) - 324.03 MB |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |52.8%| PREPARE $(CLANG20-867705943) |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |52.7%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |52.8%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |52.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |52.9%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |53.0%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |53.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |53.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |53.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |54.3%| PREPARE $(CLANG-1922233694) - 218.12 MB |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |54.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |54.4%| PREPARE $(FLAKE8_PY2-2255386470) - 8.40 MB |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |54.5%| PREPARE $(CLANG16-1380963495) - 302.01 MB |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/join_perf/libkqp-tools-join_perf.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |54.8%| PREPARE $(CLANG18-1866954364) - 318.80 MB |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |54.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |54.9%| PREPARE $(WITH_JDK17-sbr:9470949154) - 184.57 MB |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |55.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |55.3%| PREPARE $(WITH_JDK-sbr:9470949154) |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/counters_info/libydb-core-counters_info.a |55.3%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |55.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |55.5%| PREPARE $(JDK17-2548586558) |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |55.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |55.8%| PREPARE $(JDK_DEFAULT-2548586558) |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |55.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |56.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |56.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |56.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |56.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |56.9%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |56.8%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |57.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |57.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |57.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |57.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |57.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |57.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |57.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |57.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a 
|57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_join_common.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |57.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_block_hash_join.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_scalar_hash_join.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |58.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/yql_common_dq_factory.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/type_utils.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_program_builder.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_hash_operator_serdes.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_hash_join_table.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_hash_operator_common.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_hash_aggregate.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_hash_combine.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |58.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |58.4%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/yql/essentials/minikql/computation/mkql_computation_node_codegen.h |58.4%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_join_common.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |58.4%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/yql/essentials/minikql/computation/mkql_llvm_base.h |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |58.4%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_block_hash_join.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |58.5%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_scalar_hash_join.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a 
|58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |58.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/libdq-comp_nodes-no_llvm.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |58.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_block_hash_join.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_join_common.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_scalar_hash_join.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |58.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |58.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_hash_aggregate.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_hash_join_table.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_hash_operator_serdes.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_program_builder.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_hash_operator_common.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/yql_common_dq_factory.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/type_utils.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_hash_combine.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |59.0%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_block_hash_join.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |59.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |59.1%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/yql/essentials/minikql/computation/mkql_computation_node_codegen.h |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |59.0%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/yql/essentials/minikql/computation/mkql_llvm_base.h |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |59.1%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_join_common.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/discovery_mutator/libclient-extensions-discovery_mutator.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |59.2%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_scalar_hash_join.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/fq/libpublic-lib-fq.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/mon_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |59.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/join_perf/bin/main.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a 
|59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/cdc |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/reconfig_state_storage_workload |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/transfer |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |58.8%| PREPARE $(GDB) - 25.18 MB |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |59.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |58.9%| PREPARE $(BLACK-1356850112) - 3.24 MB |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |59.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a >> test.py::py2_flake8 [GOOD] |58.5%| [TS] ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a >> test_dml.py::flake8 [GOOD] >> test_kafka_streams.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |57.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |57.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload_topic |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |57.9%| [TS] ydb/tests/stress/kafka/tests/flake8 >> test_kafka_streams.py::flake8 [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a >> test_add_column.py::flake8 [GOOD] |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |57.7%| [TS] ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |57.5%| [TS] ydb/tests/datashard/add_column/flake8 >> test_add_column.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/no_llvm/libminikql-codegen-no_llvm.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |57.5%| [TS] ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |57.5%| [TS] ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/computation/no_llvm/libminikql-computation-no_llvm.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_compute_issues_buffer_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/no_llvm/libminikql-invoke_builtins-no_llvm.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |57.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/mock_lookup_factory.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_join_ut.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_compute_actor_ut.cpp >> test.py::flake8 [GOOD] |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_source_watermark_tracker_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_compute_actor_async_input_helper_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_async_compute_actor_ut.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/ut/proto/libcompute-ut-proto.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |57.8%| [TS] ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> test.py::py2_flake8 [GOOD] >> test_kqprun_recipe.py::flake8 
[GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |58.0%| [TS] ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_async_replication.py::flake8 [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |58.0%| [TS] ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |58.0%| [TS] ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors_ut.cpp |58.0%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp >> test.py::flake8 [GOOD] |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/no_llvm/libminikql-comp_nodes-no_llvm.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |57.5%| [TS] ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> test_dump_restore.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |57.3%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> test_leader_start_inflight.py::flake8 [GOOD] |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |57.0%| [TS] ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] >> test_workload_topic.py::flake8 [GOOD] |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |56.6%| [TS] ydb/tests/stress/topic/tests/flake8 >> test_workload_topic.py::flake8 [GOOD] |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |56.8%| [TS] 
ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |56.9%| [TS] ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |56.9%| [TS] ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp >> test_workload.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |56.9%| [TS] ydb/tests/stress/s3_backups/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |57.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a >> test.py::py2_flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |57.0%| [TS] ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |57.0%| [TS] ydb/tests/stress/transfer/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a >> test.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a >> test.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |57.1%| [TS] ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_vector_index.py::flake8 [GOOD] >> test_vector_index_negative.py::flake8 [GOOD] >> ydb-tests-fq-mem_alloc::import_test [GOOD] >> test_sql.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |57.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a >> test_simple_table.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a >> test_tpch_import.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> test_types_and_formats.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |57.2%| [TS] ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |57.2%| [TS] ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |57.2%| [TS] ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/ut/kqp_executer_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |57.3%| [TS] ydb/tests/datashard/vector_index/medium/flake8 >> test_vector_index_negative.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.3%| [TS] ydb/tests/olap/s3_import/flake8 >> test_types_and_formats.py::flake8 [GOOD] |57.3%| [TS] ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |57.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |57.3%| [TS] ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test_vdisks.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/load_test/ut/group_test_ut.cpp |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |57.4%| [TS] ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a >> test_workload.py::flake8 [GOOD] |57.4%| [TS] ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |57.4%| [TS] ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |57.4%| [TS] ydb/tests/functional/statistics/flake8 >> test_restarts.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |57.4%| [TS] ydb/tests/functional/blobstorage/flake8 >> test_vdisks.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |57.5%| [TS] ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_streaming.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |57.5%| [TS] ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |57.5%| [TS] ydb/tests/stress/viewer/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp >> test.py::py2_flake8 [GOOD] >> ydb-tests-fq-streaming::import_test [GOOD] |57.5%| [TS] ydb/tests/fq/streaming/flake8 >> 
test_streaming.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |57.6%| [TS] ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |57.6%| [TS] ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |57.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming/import_test >> ydb-tests-fq-streaming::import_test [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp >> test.py::py2_flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |57.7%| [TS] ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |57.7%| [TS] ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet/bootstrapper_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp >> test_ttl.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_disposition.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |57.7%| [TS] ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |57.7%| [TS] ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] |57.6%| [TS] ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |57.7%| [TS] ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |57.7%| [TS] ydb/tests/stress/show_create/view/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.7%| [TS] ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |57.6%| [TS] ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |57.7%| [TS] ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |57.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp >> test.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |57.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a >> test.py::py2_flake8 [GOOD] >> reconfig_state_storage_workload_test.py::flake8 [GOOD] >> test_board_workload.py::flake8 [GOOD] >> test_scheme_board_workload.py::flake8 [GOOD] >> test_state_storage_workload.py::flake8 [GOOD] >> test_cte.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |57.6%| [TS] ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |57.6%| [TS] ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp >> __main__.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |57.6%| [TS] ydb/tests/functional/kqp/plan2svg/flake8 >> test_cte.py::flake8 [GOOD] |57.6%| [TS] ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp >> base.py::flake8 [GOOD] >> basic_reading.py::flake8 [GOOD] >> data_paging.py::flake8 [GOOD] >> listing_paging.py::flake8 [GOOD] >> settings_validation.py::flake8 [GOOD] |57.5%| [TS] ydb/tests/stress/reconfig_state_storage_workload/tests/flake8 >> test_state_storage_workload.py::flake8 [GOOD] |57.5%| [TS] ydb/tests/library/compatibility/binaries/downloader/flake8 >> __main__.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp >> test.py::flake8 [GOOD] >> test_split_merge.py::flake8 [GOOD] >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.4%| [TS] ydb/tests/solomon/reading/flake8 >> settings_validation.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> test.py::py2_flake8 [GOOD] |57.4%| [TS] ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp 
|57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] >> test_large_import.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |57.4%| [TS] ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |57.4%| [TS] ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |57.5%| [TS] ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |57.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] >> test_secondary_index.py::flake8 [GOOD] |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |57.5%| [TS] ydb/tests/olap/s3_import/large/flake8 >> test_large_import.py::flake8 [GOOD] |57.5%| [TS] ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp >> test.py::py2_flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |57.5%| [TS] ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |57.2%| [TS] ydb/tests/datashard/secondary_index/flake8 >> test_secondary_index.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |57.2%| [TS] ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.3%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |57.3%| [TS] ydb/tests/stress/node_broker/tests/flake8 >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.0%| [TS] ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] >> test_break.py::flake8 [GOOD] |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |57.0%| [TS] ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |57.0%| [TS] ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |57.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |57.1%| [TS] ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |57.1%| [TS] ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |57.1%| [TS] 
ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |57.0%| [TS] ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |57.0%| [TS] ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |57.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |57.1%| [TS] ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp >> test_workload.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |57.1%| [TS] ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |57.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a >> test_partitioning.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |57.1%| [TS] ydb/tests/stress/cdc/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> test_sql_negative.py::flake8 [GOOD] >> test_sql_streaming.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/bool_test_enums.h_serialized.cpp >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> http_helpers.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> test_canonical_records.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |57.2%| [TS] ydb/tests/datashard/partitioning/flake8 >> test_partitioning.py::flake8 [GOOD] |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |57.3%| [TS] ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |57.2%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |57.3%| [TS] ydb/tests/functional/audit/flake8 >> test_canonical_records.py::flake8 [GOOD] |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |57.2%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/ydb-core-kqp-federated_query-ut_service |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> test_encryption.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |57.0%| [TS] ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp >> test_decimal.py::flake8 
[GOOD] >> test_duplicates.py::flake8 [GOOD] >> test_parallel.py::flake8 [GOOD] >> test_s1.py::flake8 [GOOD] >> test_s_float.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |57.1%| [TS] ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |57.1%| [TS] ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |57.1%| [TS] ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |57.1%| [TS] ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_delete_all_after_inserts.py::flake8 [GOOD] >> test_delete_by_explicit_row_id.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/functional/tpc/medium/tpch/flake8 >> test_s_float.py::flake8 [GOOD] |57.1%| [TS] ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |57.0%| [TS] ydb/tests/olap/delete/flake8 
>> test_delete_by_explicit_row_id.py::flake8 [GOOD] |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |57.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] >> data_read_correctness.py::flake8 [GOOD] >> order_by_with_limit.py::flake8 [GOOD] >> tablets_movement.py::flake8 [GOOD] >> test_create.py::flake8 [GOOD] >> test_cs_many_updates.py::flake8 [GOOD] >> test_delete.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> test_overloads.py::flake8 [GOOD] >> test_replace.py::flake8 [GOOD] >> test_select.py::flake8 [GOOD] >> test_update.py::flake8 [GOOD] >> test_upsert.py::flake8 [GOOD] >> upgrade_to_internal_path_id.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |57.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/binaries/downloader/libpy3compatibility-binaries-downloader.global.a >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |56.9%| [TS] ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp >> test.py::py2_flake8 [GOOD] |56.9%| [TS] ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |56.9%| [TS] ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |56.9%| [TS] ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |56.9%| [TS] ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |56.9%| [TS] ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_clean.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_default_path.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_import_csv.py::flake8 [GOOD] >> test_upload.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] >> ydb-tests-functional-serverless::import_test [GOOD] |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |56.8%| [TS] ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [TS] ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [TS] ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |56.7%| [TS] ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |56.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |56.8%| [TS] ydb/tests/functional/tpc/medium/flake8 >> test_workload_manager.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.7%| [TS] ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |56.7%| [TS] ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/binaries/downloader/downloader |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> ydb-tests-functional-wardens::import_test [GOOD] |56.4%| [TS] ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.4%| [TS] ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] |56.2%| [TS] ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.2%| [TS] ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] >> 
test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test_copy_table.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |56.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |56.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] >> test_parametrized_queries.py::flake8 [GOOD] |55.9%| [TS] ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |56.0%| [TS] ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |56.0%| [TS] ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.0%| [TS] ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |56.1%| [TS] ydb/tests/datashard/ttl/flake8 >> test_ttl.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_import_csv.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> ydb-tests-stress-topic-tests::import_test [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_upload.py::flake8 [GOOD] >> test_workload_cdc.py::flake8 [GOOD] >> test_workload_ctas.py::flake8 [GOOD] >> test_workload_kafka.py::flake8 [GOOD] >> test_workload_kv.py::flake8 [GOOD] >> test_workload_log.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test_workload_mixed.py::flake8 [GOOD] >> test_workload_node_broker.py::flake8 [GOOD] >> test_workload_olap.py::flake8 [GOOD] >> test_workload_oltp.py::flake8 [GOOD] >> test_workload_reconfig_state_storage.py::flake8 [GOOD] >> test_workload_show_create.py::flake8 [GOOD] >> test_workload_simple_queue.py::flake8 [GOOD] >> test_workload_statistics.py::flake8 [GOOD] >> test_workload_topic.py::flake8 [GOOD] >> test_workload_topic_kafka.py::flake8 [GOOD] >> test_workload_transfer.py::flake8 [GOOD] >> ydb-tests-stress-ctas-tests::import_test [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |56.0%| [TS] ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |56.1%| [TS] ydb/tests/datashard/parametrized_queries/flake8 >> test_parametrized_queries.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_secrets.py::flake8 [GOOD] >> test_secrets_usage.py::flake8 [GOOD] |56.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/ctas/tests/import_test >> ydb-tests-stress-ctas-tests::import_test [GOOD] |56.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp >> test_s3.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/topic/tests/import_test >> ydb-tests-stress-topic-tests::import_test [GOOD] |56.1%| [TS] ydb/tests/olap/load/flake8 >> test_workload_transfer.py::flake8 [GOOD] |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |56.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |56.1%| [TS] ydb/tests/functional/secrets/flake8 >> test_secrets_usage.py::flake8 [GOOD] |56.1%| [TS] ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] >> ydb-tests-fq-restarts::import_test [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] >> ydb-tests-functional-encryption::import_test [GOOD] >> test.py::py2_flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |56.0%| [TS] ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |56.0%| [TS] ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |56.0%| [TS] ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_workload_topic.py::flake8 [GOOD] >> stress-reconfig_state_storage_workload-tests::import_test [GOOD] |55.9%| 
[TS] ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |56.0%| [TS] ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |56.0%| [TS] ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |56.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |56.0%| [TS] ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |56.0%| [TS] ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |56.1%| [TS] ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |55.8%| [TS] ydb/tests/stress/topic_kafka/tests/flake8 >> test_workload_topic.py::flake8 [GOOD] |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/reconfig_state_storage_workload/tests/import_test >> stress-reconfig_state_storage_workload-tests::import_test [GOOD] |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> test_workload.py::flake8 [GOOD] >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] >> pile_promotion_test.py::flake8 [GOOD] >> test_scheme_board_workload.py::flake8 [GOOD] >> ydb-tests-stress-cdc-tests::import_test [GOOD] |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |55.7%| [TS] ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |55.8%| [TS] ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |56.0%| [TS] ydb/tests/stress/ctas/tests/flake8 >> test_workload.py::flake8 [GOOD] >> ydb-tests-stress-transfer-tests::import_test [GOOD] >> test.py::flake8 [GOOD] |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp >> tablet_scheme_tests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> 
test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |55.9%| [TS] ydb/tests/stress/scheme_board/pile_promotion/tests/flake8 >> test_scheme_board_workload.py::flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/transfer/tests/import_test >> ydb-tests-stress-transfer-tests::import_test [GOOD] |55.9%| [TS] ydb/tests/functional/tpcc/flake8 >> test.py::flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |56.0%| [TS] ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/cdc/tests/import_test >> ydb-tests-stress-cdc-tests::import_test [GOOD] |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |55.9%| [TS] ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp >> downloader::import_test [GOOD] >> test_query_cache.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_grants.py::flake8 [GOOD] >> test_paths_lookup.py::flake8 [GOOD] >> overlapping_portions.py::flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |56.0%| [TS] ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |56.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/compatibility/binaries/downloader/import_test >> downloader::import_test [GOOD] >> test_example.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |55.9%| [TS] ydb/tests/functional/security/flake8 >> test_paths_lookup.py::flake8 [GOOD] |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |55.9%| [TS] ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |55.9%| [TS] ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |56.0%| [TS] ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |56.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |56.0%| [TS] ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |56.0%| [TS] ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp >> test_workload.py::flake8 [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |56.0%| [TS] ydb/tests/stress/mixedpy/tests/flake8 >> test_workload.py::flake8 [GOOD] >> test_select.py::flake8 [GOOD] |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |55.0%| [TS] ydb/tests/datashard/select/flake8 >> test_select.py::flake8 [GOOD] |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp >> basic_user_scenarios.py::flake8 [GOOD] >> conftest.py::flake8 
[GOOD] |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |55.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/ydb-tests-example |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_audit.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |55.2%| [TS] ydb/tests/functional/backup_collection/flake8 >> conftest.py::flake8 [GOOD] |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |55.3%| [TS] ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |55.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |55.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_b96df764969d83c871c54cf9e5.o |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |55.3%| [TS] ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp >> allure_utils.py::flake8 [GOOD] >> remote_execution.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |55.4%| [TS] ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |55.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp >> ydb-tests-datashard-vector_index-medium::import_test [GOOD] |54.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/medium/import_test >> ydb-tests-datashard-vector_index-medium::import_test [GOOD] |55.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |54.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/block_layout_converter_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/ydb/ydb_login_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/packed_tuple_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/ydb-library-yql-dq-comp_nodes-hash_join_utils-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> ydb-tests-functional-query_cache::import_test [GOOD] |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/accumulator_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/hash_table_ut.cpp |54.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |54.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |54.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |54.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |54.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp >> ydb-tests-example::import_test [GOOD] |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |54.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_join_common.cpp |55.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |55.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/import_test >> ydb-tests-example::import_test [GOOD] |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |55.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/s3_backups |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp 
|55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp 
|55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |55.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |55.0%| [PR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/include/llvm/IR/Attributes.inc{, .d} |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |55.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_scalar_hash_join.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |55.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg 
|56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |57.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |57.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_block_hash_join.cpp |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |58.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_join_common.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |58.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_scalar_hash_join.cpp |59.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/dq_block_hash_join.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |61.7%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/libdq-comp_nodes-no_llvm.a |66.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |67.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |68.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |68.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |70.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> ydb-tests-stress-s3_backups-tests::import_test [GOOD] |71.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |71.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/libdq-comp_nodes-no_llvm.a |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |71.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/s3_backups/tests/import_test >> ydb-tests-stress-s3_backups-tests::import_test [GOOD] >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |72.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |73.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> ydb-tests-functional-backup_collection::import_test [GOOD] >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |74.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |75.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |75.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/backup_collection/import_test >> ydb-tests-functional-backup_collection::import_test [GOOD] |75.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |76.4%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/plan2svg/import_test >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |80.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/packed_tuple_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/view/view_ut.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/packed_tuple_ut.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |80.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |80.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |80.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_ut.cpp |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |80.2%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |80.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut >> ydb-tests-olap-column_family-compression::import_test [GOOD] |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |80.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |80.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/column_family/compression/import_test >> ydb-tests-olap-column_family-compression::import_test [GOOD] |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |80.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |80.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_join_common.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |80.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_join_common.cpp |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |80.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |79.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |79.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |79.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |79.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |79.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> TAsyncComputeActorTest::Empty [GOOD] >> TAsyncComputeActorTest::Basic |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> ydb-tests-functional-sqs-common::import_test [GOOD] |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |79.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |79.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] |79.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |79.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |79.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |79.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |78.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |78.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |78.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |78.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |78.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |78.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |79.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |79.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |79.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |79.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |79.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |79.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |79.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_scalar_hash_join.cpp |79.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |79.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |79.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |79.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |79.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |79.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |79.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |79.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |79.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |79.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> ydb-tests-stress-log-tests::import_test [GOOD] |79.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_scalar_hash_join.cpp |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |79.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |79.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |79.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |79.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |79.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |79.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |79.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |79.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_block_hash_join.cpp |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |79.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/dq_block_hash_join.cpp |79.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |79.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |79.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |79.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> ydb-tests-datashard-dump_restore::import_test [GOOD] |79.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-datashard-dump_restore::import_test [GOOD] |79.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |79.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> ydb-tests-stress-kafka-tests::import_test [GOOD] |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |78.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |78.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kafka/tests/import_test >> ydb-tests-stress-kafka-tests::import_test [GOOD] |78.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |78.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |78.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: 
symbol not defined
[... ld.lld repeats the same "version script assignment of 'global' to symbol '<name>' failed: symbol not defined" warning several hundred more times while linking yqlrun, for libc/POSIX functions (stdio, sockets, time, string/memory, xattr), pthread_* primitives, math helpers (frexp*, lgamma*, modf*, remquo*, sincos*), XDR routines (xdr_*), sanitizer annotation hooks (Annotate*), and glibc-internal aliases (__isoc99_*, __xstat*, __fxstat*); the sequence then largely repeats for an overlapping set of symbols ...]
ld.lld: warning: version
script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'stpcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'uname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcslen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |78.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |78.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |78.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |78.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |78.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |78.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |78.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |78.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |78.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |78.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |78.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |78.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |78.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |78.1%| [LD] 
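
For context on the run of ld.lld warnings above: lld emits "version script assignment of 'global' to symbol X failed: symbol not defined" when the symbol version script passed to the linker names a symbol that the link does not actually define. Judging from the warnings themselves, the script used here enumerates a large set of libc/pthread/XDR wrappers that this particular binary never defines, so the messages are noisy but non-fatal. A minimal reproduction sketch, with hypothetical file names, assuming clang and lld are available:

    // demo.cpp -- minimal reproduction sketch (file names and the version
    // script are hypothetical, not part of this build).
    //
    // exports.ver:
    //   { global: present_symbol; missing_symbol; local: *; };
    //
    // Build a shared object with lld:
    //   clang++ -shared -fPIC -fuse-ld=lld -Wl,--version-script=exports.ver demo.cpp -o libdemo.so
    //
    // present_symbol is defined below and gets exported; missing_symbol is
    // defined nowhere, so ld.lld prints the same kind of warning seen above:
    //   ld.lld: warning: version script assignment of 'global' to symbol
    //   'missing_symbol' failed: symbol not defined
    // The link still succeeds; the warning only says the script listed more
    // names than the object actually defines.
    extern "C" int present_symbol() {
        return 42;
    }
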
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |77.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |77.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |77.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> ydb-tests-stress-mixedpy-tests::import_test [GOOD] >> TAsyncComputeActorTest::Basic [GOOD] |76.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/tests/import_test >> ydb-tests-stress-mixedpy-tests::import_test [GOOD] >> TAsyncComputeActorTest::StatsMode [GOOD] >> TAsyncComputeActorTest::InputTransformMultichannel >> ydb-core-viewer-tests::import_test [GOOD] |76.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/import_test >> ydb-core-viewer-tests::import_test [GOOD] |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |74.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/ut/yql-dq-comp_nodes-hash_join_utils-simd-ut |74.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/block_layout_converter_ut.cpp >> SimdFallback::CRC [GOOD] >> SimdFallback::ShiftBytes [GOOD] >> TSimdAVX2::SimdBool [GOOD] >> TSimdSSE42::SimdUInt8 [GOOD] >> SimdFallback::Shuffle [GOOD] >> TSimdSSE42::Shuffle [GOOD] >> TSimdSSE42::SimdInt8 [GOOD] >> TSimdSSE42::ShiftBytes [GOOD] >> TSimdAVX2::Shuffle128 [GOOD] >> TSimdAVX2::SimdTrait [GOOD] >> TSimdAVX2::SimdUInt8 [GOOD] >> SimdFallback::Blend [GOOD] >> TSimdSSE42::UnpackMask [GOOD] >> TSimdSSE42::Blend [GOOD] >> TSimdAVX2::SimdInt8 [GOOD] >> TSimdAVX2::CRC [GOOD] >> TSimdAVX2::ShiftBytes [GOOD] >> TSimdSSE42::SimdBool [GOOD] >> TSimdSSE42::SimdTrait [GOOD] >> TSimdAVX2::Shuffle [GOOD] >> TSimdAVX2::Blend [GOOD] >> SimdFallback::BestTrait [GOOD] >> SimdFallback::UnpackMask [GOOD] >> SimdFallback::SimdTrait [GOOD] >> TSimdAVX2::UnpackMask [GOOD] >> ydb-tests-datashard-secondary_index::import_test [GOOD] |73.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/ut/unittest >> TSimdAVX2::UnpackMask [GOOD] |73.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/secondary_index/import_test >> ydb-tests-datashard-secondary_index::import_test [GOOD] |72.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |72.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/block_layout_converter_ut.cpp >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |70.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |70.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |69.9%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |68.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |68.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |68.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
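
The TSimdSSE42/TSimdAVX2/SimdFallback cases above ([GOOD] results for CRC, Shuffle, UnpackMask, Blend and so on) exercise SIMD helper traits with hardware and fallback code paths. As a rough illustration of the hardware-vs-fallback idea behind the CRC cases only, here is a self-contained CRC32C sketch; it is not the hash_join_utils implementation, and the function names and build line are assumptions:

    // crc32c_demo.cpp -- illustrative only.
    // Build (assumption): g++ -O2 -msse4.2 crc32c_demo.cpp
    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #if defined(__SSE4_2__)
    #include <nmmintrin.h>
    #endif

    // Plain bitwise CRC32C (Castagnoli, reflected polynomial 0x82F63B78).
    static uint32_t Crc32cScalar(const void* data, size_t len, uint32_t crc = 0) {
        const uint8_t* p = static_cast<const uint8_t*>(data);
        crc = ~crc;
        for (size_t i = 0; i < len; ++i) {
            crc ^= p[i];
            for (int b = 0; b < 8; ++b)
                crc = (crc >> 1) ^ (0x82F63B78u & (0u - (crc & 1u)));
        }
        return ~crc;
    }

    #if defined(__SSE4_2__)
    // Same CRC computed with the SSE4.2 crc32 instruction, 8 bytes at a time.
    static uint32_t Crc32cSse42(const void* data, size_t len, uint32_t crc = 0) {
        const uint8_t* p = static_cast<const uint8_t*>(data);
        crc = ~crc;
        while (len >= 8) {
            uint64_t chunk;
            std::memcpy(&chunk, p, 8);
            crc = static_cast<uint32_t>(_mm_crc32_u64(crc, chunk));
            p += 8; len -= 8;
        }
        while (len--) crc = _mm_crc32_u8(crc, *p++);
        return ~crc;
    }
    #endif

    int main() {
        const char msg[] = "123456789";          // standard CRC test vector
        std::printf("scalar  %08X\n", Crc32cScalar(msg, 9)); // 0xE3069283 expected
    #if defined(__SSE4_2__)
        std::printf("sse4.2  %08X\n", Crc32cSse42(msg, 9));  // must match the scalar value
    #endif
        return 0;
    }
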
ydb/core/sys_view/query_stats/ut/unittest |68.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |68.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |68.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 >> QueryStats::Ranges [GOOD] |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |67.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> PartitionStats::CollectorOverload [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/storagepoolmon/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |66.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |66.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |66.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/partition_stats/ut/unittest |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TAsyncComputeActorTest::InputTransformMultichannel [GOOD] |66.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |66.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |66.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |66.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |66.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow >> TComputeActorAsyncInputHelperTest::PollAsyncInput [GOOD] >> Yq_1::Basic >> Yq_1::ModifyConnections |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow >> TComputeActorTest::Empty [GOOD] >> TComputeActorTest::ReceiveData [GOOD] >> TComputeActorTest::PollAckPoll [GOOD] >> 
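
The "Suite::Case [GOOD]" entries interleaved above (for example TBlobStorageStoragePoolMonTest::SizeClassCalcTest or PartitionStats::Collector) are individual C++ unit-test cases reported by the harness as suite/case pairs. In this repository such cases are typically declared with the common unittest macros and built as a ya make test target; a minimal sketch of the shape, with a made-up suite name and assuming the usual library/cpp/testing/unittest dependency:

    #include <library/cpp/testing/unittest/registar.h>

    // Hypothetical suite: on success the harness reports it as
    // "ExampleSuite::Addition [GOOD]", the same format as the lines above.
    Y_UNIT_TEST_SUITE(ExampleSuite) {
        Y_UNIT_TEST(Addition) {
            UNIT_ASSERT_VALUES_EQUAL(2 + 2, 4);
        }
    }
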
TDqSourceWatermarkTrackerTest::WatermarkMovement [GOOD] >> TDqSourceWatermarkTrackerTest::WatermarkMovementWithIdleness [GOOD] >> TDqSourceWatermarkTrackerTest::IdleFirstShouldReturnNothing [GOOD] >> TDqSourceWatermarkTrackerTest::Idle1 [GOOD] >> TDqSourceWatermarkTrackerTest::IdleNextCheckAt [GOOD] >> TIssuesBufferTest::TestEmpty [GOOD] >> Yq_1::CreateConnection_With_Existing_Name >> Yq_1::CreateQuery_With_Idempotency >> PrivateApi::PingTask >> Yq_1::Basic_Null >> Yq_1::ListConnections >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> Yq_1::DeleteConnections >> Yq_1::DescribeJob >> Yq_1::DescribeConnection >> TIssuesBufferTest::TestSimplePush [GOOD] >> TIssuesBufferTest::TestPushWithOverflow [GOOD] >> TIssuesBufferTest::TestSmallBuffer [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait >> TIssuesBufferTest::TestUseAfterDump [GOOD] >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false |66.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false [GOOD] |66.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] |66.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink >> KqpDataIntegrityTrails::BrokenReadLock+UseSink >> KqpDataIntegrityTrails::BrokenReadLock-UseSink >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true [GOOD] >> 
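
The TDqSourceWatermarkTrackerTest cases listed above (WatermarkMovement, WatermarkMovementWithIdleness, Idle1, IdleNextCheckAt) concern watermark propagation for streaming sources. The sketch below is not the DQ implementation, only a generic illustration of the behaviour those test names suggest: per-partition event times, a watermark taken as the minimum over active partitions, and idle partitions that stop holding the watermark back after a timeout.

    // watermark_tracker_sketch.cpp -- illustrative only.
    #include <algorithm>
    #include <chrono>
    #include <cstdio>
    #include <map>
    #include <optional>

    using TInstant = std::chrono::milliseconds;   // simplified stand-in for a time point

    class TWatermarkTrackerSketch {
    public:
        TWatermarkTrackerSketch(TInstant granularity, TInstant idleTimeout)
            : Granularity(granularity), IdleTimeout(idleTimeout) {}

        // Called for every event; returns a new watermark when it moves.
        std::optional<TInstant> NotifyEvent(int partition, TInstant eventTime, TInstant now) {
            auto& st = Partitions[partition];
            st.LastEventTime = std::max(st.LastEventTime, eventTime);
            st.LastActivity = now;
            st.Idle = false;
            return RecomputeWatermark();
        }

        // Called periodically; a partition idle longer than IdleTimeout is
        // excluded so it cannot block watermark movement forever.
        std::optional<TInstant> NotifyIdleness(TInstant now) {
            for (auto& kv : Partitions)
                kv.second.Idle = (now - kv.second.LastActivity) >= IdleTimeout;
            return RecomputeWatermark();
        }

    private:
        struct TPartitionState {
            TInstant LastEventTime{0};
            TInstant LastActivity{0};
            bool Idle = false;
        };

        std::optional<TInstant> RecomputeWatermark() {
            std::optional<TInstant> minTime;
            for (const auto& kv : Partitions) {
                const TPartitionState& st = kv.second;
                if (st.Idle) continue;                 // idle partitions do not hold it back
                if (!minTime || st.LastEventTime < *minTime) minTime = st.LastEventTime;
            }
            if (!minTime) return std::nullopt;         // everything idle: nothing to emit
            // Round down to the configured granularity and emit only on movement.
            TInstant rounded = (*minTime / Granularity) * Granularity;
            if (!Watermark || rounded > *Watermark) { Watermark = rounded; return Watermark; }
            return std::nullopt;
        }

        TInstant Granularity, IdleTimeout;
        std::map<int, TPartitionState> Partitions;
        std::optional<TInstant> Watermark;
    };

    int main() {
        TWatermarkTrackerSketch tracker(TInstant{100}, TInstant{1000});
        if (auto w = tracker.NotifyEvent(/*partition*/ 0, TInstant{250}, TInstant{0}))
            std::printf("watermark moved to %lld ms\n", static_cast<long long>(w->count()));
        return 0;
    }
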
TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/import_test >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false |65.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |65.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> PrivateApi::PingTask [GOOD] >> KqpDataIntegrityTrails::Select >> KqpDataIntegrityTrails::Ddl >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| 
[TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> PrivateApi::GetTask >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> 
TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true |65.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/ctas >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> 
KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 18624, MsgBus: 3203 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009c8/r3tmp/tmpfGyZuF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18624, node 1 TClient is connected to server localhost:3203 TClient is connected to server localhost:3203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> KqpDataIntegrityTrails::Select [GOOD] >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30815, MsgBus: 27156 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000a31/r3tmp/tmpvFFy0V/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30815, node 1 TClient is connected to server localhost:27156 TClient is connected to server localhost:27156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24800, MsgBus: 29247 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000a1b/r3tmp/tmp9kIMrc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24800, node 1 TClient is connected to server localhost:29247 TClient is connected to server localhost:29247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64953, MsgBus: 9107 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000a2f/r3tmp/tmp7qdazv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64953, node 1 TClient is connected to server localhost:9107 TClient is connected to server localhost:9107 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> Yq_1::Basic_Null [GOOD] >> TSchemeShardServerLess::StorageBilling >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> PrivateApi::GetTask [GOOD] >> Yq_1::Basic [GOOD] >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TSchemeShardServerLess::StorageBillingLabels >> IndexBuildTestReboots::IndexPartitioningUniq >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeConnection [GOOD] >> Yq_1::ListConnections [GOOD] >> IndexBuildTestReboots::BaseCase |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] |65.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> Yq_1::Basic_TaggedLiteral >> Yq_1::CreateConnections_With_Idempotency |65.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |65.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> PrivateApi::Nodes >> Yq_1::Basic_EmptyList >> IndexBuildTestReboots::DropIndexWithDataColumnsUniq >> IndexBuildTestReboots::UniqueIndexValidationFailsInsideShard >> IndexBuildTestReboots::DropIndexWithDataColumns >> 
IndexBuildTestReboots::IndexPartitioning >> IndexBuildTestReboots::BaseCaseWithDataColumns >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> IndexBuildTestReboots::DropIndex >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> Yq_1::DescribeQuery >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> Yq_1::DeleteQuery >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 6155, MsgBus: 1399 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000a1d/r3tmp/tmpMWeAfF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6155, node 1 TClient is connected to server localhost:1399 TClient is connected to server localhost:1399 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 22547, MsgBus: 27101 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009f0/r3tmp/tmpIC6zjR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22547, node 1 TClient is connected to server localhost:27101 TClient is connected to server localhost:27101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 23005, MsgBus: 4369 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000a1e/r3tmp/tmpg4F10B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23005, node 1 TClient is connected to server localhost:4369 TClient is connected to server localhost:4369 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 11014, MsgBus: 25927 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009f4/r3tmp/tmpwKg8Zw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11014, node 1 TClient is connected to server localhost:25927 TClient is connected to server localhost:25927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 21650, MsgBus: 65060 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000a12/r3tmp/tmpNO6eYe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21650, node 1 TClient is connected to server localhost:65060 TClient is connected to server localhost:65060 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |65.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.528049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.528083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.528090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.528096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.528101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.528105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.528116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.528132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.528257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.528312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.542216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.542244Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.545941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.545985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.546044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.547614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.548297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.548418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.548510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.549701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.549754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.550005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.550019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.550051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.550060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.550068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.550100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.551612Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.577580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.577681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-10-24T16:20:30.577759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.577770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.577818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.577839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.578627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.578683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.578759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.578772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.578778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.578785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.579201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.579213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.579219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.579604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.579620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.579626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.579635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.580407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.580846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.580905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.581140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.581173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.581182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.581277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.581287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.581334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.581349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.581922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.581934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
D DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-10-24T16:20:32.402777Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 3 -> 128 2025-10-24T16:20:32.403227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.403271Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.403278Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.403283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-10-24T16:20:32.403290Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.403323Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:32.403683Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-10-24T16:20:32.403722Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-10-24T16:20:32.403800Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.403820Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 34359740528 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.403825Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-10-24T16:20:32.403916Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-10-24T16:20:32.403925Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-10-24T16:20:32.403950Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.403973Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:578: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:368:2340], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:20:32.404325Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.404331Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:20:32.404370Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.404374Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:20:32.404443Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.404451Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:761: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-10-24T16:20:32.404454Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 240 -> 240 2025-10-24T16:20:32.404540Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.404549Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.404552Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:20:32.404556Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-10-24T16:20:32.404560Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-10-24T16:20:32.404569Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:20:32.404995Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.405005Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:20:32.405016Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:20:32.405019Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.405022Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:20:32.405025Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.405028Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:20:32.405036Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:309:2298] message: TxId: 102 2025-10-24T16:20:32.405041Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.405045Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:20:32.405049Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:20:32.405071Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.405122Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:20:32.405388Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.405398Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:513:2454] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-10-24T16:20:32.406072Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.406096Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1079: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-10-24T16:20:32.406099Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1085: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-10-24T16:20:32.406121Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, 
explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-10-24T16:20:32.406128Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-10-24T16:20:32.406470Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.406514Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:32.845851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:32.845869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.845875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:32.845882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:32.845887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:32.845891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:32.845908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.845920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:32.846030Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:32.846086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:32.862484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:32.862506Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:32.865810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:32.865842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:32.865879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:32.867163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:32.867319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:32.867421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.867496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:32.868454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.868490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:32.868692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.868702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.868729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:32.868736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:32.868743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:32.868767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.870182Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:32.887097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-10-24T16:20:32.887142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.887175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:32.887181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:32.887234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:32.887246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:32.887736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.887766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:32.887797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.887803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:32.887806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:32.887809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:32.888262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.888273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:32.888278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:32.888534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.888540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.888543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.888547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.888988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:32.889394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:32.889456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:32.889657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.889686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.889693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.889751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:32.889760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.889785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.889797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:32.890243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.890250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 2025-10-24T16:20:33.054853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-10-24T16:20:33.054893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409546 2025-10-24T16:20:33.055568Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 2025-10-24T16:20:33.055819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186234409547 2025-10-24T16:20:33.056034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-10-24T16:20:33.056077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:20:33.056156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-10-24T16:20:33.056201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:33.056208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:20:33.056227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:20:33.056306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:33.056312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:20:33.056322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:20:33.056676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-10-24T16:20:33.056685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-10-24T16:20:33.056696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046678944:7 2025-10-24T16:20:33.056698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-10-24T16:20:33.056925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-10-24T16:20:33.056932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-10-24T16:20:33.056944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:20:33.056966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-10-24T16:20:33.057025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-10-24T16:20:33.057030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-10-24T16:20:33.057073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-10-24T16:20:33.057085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:20:33.057088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:924:2788] TestWaitNotification: OK eventTxId 106 2025-10-24T16:20:33.057140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.057160Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 26us result status StatusPathDoesNotExist 2025-10-24T16:20:33.057187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:33.057224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.057235Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 12us result status StatusPathDoesNotExist 2025-10-24T16:20:33.057246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:33.057278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.057289Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2025-10-24T16:20:33.057345Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { 
MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-10-24T16:20:33.057406Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-10-24T16:20:33.057412Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-10-24T16:20:33.057418Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-10-24T16:20:33.057424Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:32.712483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:32.712503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.712509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:32.712516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:32.712523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:32.712527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:32.712538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.712548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-10-24T16:20:32.712637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:32.712680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:32.723191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:32.723210Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:32.725611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:32.725637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:32.725667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:32.726715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:32.726841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:32.726922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.726982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:32.727669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.727698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:32.727891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.727900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.727917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:32.727922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:32.727927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:32.727945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.728899Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:32.742582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.742640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.742681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:32.742687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:32.742740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:32.742749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:32.743258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.743285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:32.743318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.743323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:32.743327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:32.743330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:32.743577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.743584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:32.743587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:32.743840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.743865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.743872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.743877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.744312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:32.744611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:32.744649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:32.744827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.744848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.744857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.744909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:32.744914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.744934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.744942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:32.745245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.745251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
T16:20:32.898938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72075186233409549, cookie: 107 2025-10-24T16:20:32.898953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72075186233409549, cookie: 107 2025-10-24T16:20:32.898958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-10-24T16:20:32.898963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 3 2025-10-24T16:20:32.898967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-10-24T16:20:32.898977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-10-24T16:20:32.899152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6705: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409549, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 107 Step: 200 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409550 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 293 } } CommitVersion { Step: 200 TxId: 107 } 2025-10-24T16:20:32.899161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409552, partId: 0 2025-10-24T16:20:32.899182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72075186233409549, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 107 Step: 200 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409550 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 293 } } CommitVersion { Step: 200 TxId: 107 } 2025-10-24T16:20:32.899198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409549, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 107 Step: 200 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409550 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 293 } } CommitVersion { Step: 200 TxId: 107 } 2025-10-24T16:20:32.899553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72075186233409549, at schemeshard: 72075186233409549, message: Source { RawX1: 758 RawX2: 4294969947 } Origin: 72075186233409552 State: 2 TxId: 107 Step: 0 Generation: 2 2025-10-24T16:20:32.899563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409552, partId: 0 2025-10-24T16:20:32.899578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 
72075186233409549, message: Source { RawX1: 758 RawX2: 4294969947 } Origin: 72075186233409552 State: 2 TxId: 107 Step: 0 Generation: 2 2025-10-24T16:20:32.899585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409549 2025-10-24T16:20:32.899594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409549 message: Source { RawX1: 758 RawX2: 4294969947 } Origin: 72075186233409552 State: 2 TxId: 107 Step: 0 Generation: 2 2025-10-24T16:20:32.899615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72075186233409549:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409549 2025-10-24T16:20:32.899620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:20:32.899625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-10-24T16:20:32.899631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 107:0 129 -> 240 2025-10-24T16:20:32.900360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-10-24T16:20:32.900391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-10-24T16:20:32.900435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:20:32.900634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:20:32.900687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:20:32.900695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 107:0 ProgressState 2025-10-24T16:20:32.900706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#107:0 progress is 1/1 2025-10-24T16:20:32.900710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-10-24T16:20:32.900715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#107:0 progress is 1/1 2025-10-24T16:20:32.900718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-10-24T16:20:32.900722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-10-24T16:20:32.900734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: 
TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:788:2673] message: TxId: 107 2025-10-24T16:20:32.900740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-10-24T16:20:32.900745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 107:0 2025-10-24T16:20:32.900749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 107:0 2025-10-24T16:20:32.900767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-10-24T16:20:32.901131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.901144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:789:2674] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-10-24T16:20:32.901934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeTryKeepInMemory } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeRegular } } } } TxId: 108 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2025-10-24T16:20:32.901968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:523: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 108:0, at schemeshard: 72075186233409549 2025-10-24T16:20:32.902007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: , at schemeshard: 72075186233409549 2025-10-24T16:20:32.902394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: " TxId: 108 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-10-24T16:20:32.902428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 0 name: , operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2025-10-24T16:20:32.903023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeRegular } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeTryKeepInMemory } } } } TxId: 109 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2025-10-24T16:20:32.903050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:523: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 109:0, at schemeshard: 72075186233409549 2025-10-24T16:20:32.903076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other, at schemeshard: 72075186233409549 2025-10-24T16:20:32.903469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other" TxId: 109 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-10-24T16:20:32.903500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 1 name: Other, operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 109, wait until txId: 109 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.139839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.139885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.139892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.139899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.139906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.139911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.139920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.139935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.140053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.140120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.156749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.156772Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.160867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.160899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.160949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.162398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.162628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with 
owners number: 0 2025-10-24T16:20:30.162738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.162803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.163800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.163836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.164217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.164259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.164301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.164310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.164316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.164339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.165981Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.191074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.191148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.191201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.191211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.191250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.191266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.191899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.191946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.191997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.192007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.192013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.192019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.192474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.192486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.192492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.192891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.192903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.192909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.192916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.193756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.194234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.194284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.194496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.194521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.194528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.194589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.194596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.194643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.194655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.195400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.195410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... p Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.171550Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 34359740528 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.171555Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-10-24T16:20:32.171613Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 128 -> 240 2025-10-24T16:20:32.171620Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-10-24T16:20:32.171638Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.171674Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:586: Send TEvUpdateTenantSchemeShard, to actor: [8:398:2367], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" 
MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.172091Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6242: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } 2025-10-24T16:20:32.172117Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 }, at schemeshard: 72075186234409546 2025-10-24T16:20:32.172162Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:599: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-10-24T16:20:32.172224Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.172230Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:20:32.172258Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.172263Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:20:32.172277Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.172283Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:761: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-10-24T16:20:32.172287Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 240 -> 240 2025-10-24T16:20:32.172453Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:20:32.172465Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:20:32.172470Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:20:32.172475Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-10-24T16:20:32.172480Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-10-24T16:20:32.172492Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-10-24T16:20:32.172865Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6202: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-10-24T16:20:32.172878Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.172903Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:578: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:398:2367], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:32.172916Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-10-24T16:20:32.172920Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-10-24T16:20:32.172940Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-10-24T16:20:32.172944Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:495:2438], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-10-24T16:20:32.173163Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-10-24T16:20:32.173297Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.173306Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:20:32.173319Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:20:32.173324Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.173329Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:20:32.173333Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.173337Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-10-24T16:20:32.173342Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.173347Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:20:32.173351Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:20:32.173361Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.173690Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:20:32.173704Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-10-24T16:20:32.173986Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:20:32.173994Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:20:32.174054Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:20:32.174071Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.174076Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:579:2520] TestWaitNotification: OK eventTxId 103 |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests >> PrivateApi::Nodes [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.578730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.578756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.578761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.578769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.578775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.578779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.578795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.578809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.578923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.578986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.595395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.595421Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.598941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.598974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.599029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.600733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.600969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.601074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.601165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-10-24T16:20:30.604270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.604307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.604488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.604500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.604524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.604531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.604537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.604557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.605856Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.624447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.624524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.624578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.624585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.624623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.624637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.626016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.626051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.626100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.626110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.626115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.626121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.626648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.626661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.626671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.627232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.627249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.627254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.627261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.627914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.628408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.628433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.628582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.628601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-10-24T16:20:30.628607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.628662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.628669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.628694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.628705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.629413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.629426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 9551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:32.794252Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:20:32.794255Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:20:32.794269Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:20:32.794272Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:448:2401], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-10-24T16:20:32.794357Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 0 2025-10-24T16:20:32.794421Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.794426Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:20:32.794436Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:20:32.794439Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.794442Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:20:32.794444Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.794447Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-10-24T16:20:32.794450Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.794453Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:20:32.794456Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:20:32.794465Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:20:32.794700Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:20:32.794710Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:20:32.794885Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:20:32.794890Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:20:32.794929Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:20:32.794940Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.794943Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:526:2477] TestWaitNotification: OK eventTxId 103 2025-10-24T16:20:32.794992Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.795011Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 25us result status StatusSuccess 2025-10-24T16:20:32.795074Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 
PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1288490188800 data_size_soft_quota: 1224065679360 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.795118Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.795127Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 10us result status StatusSuccess 2025-10-24T16:20:32.795156Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1288490188800 data_size_soft_quota: 1224065679360 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 
MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.795192Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:20:32.795201Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 10us result status StatusSuccess 2025-10-24T16:20:32.795231Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1288490188800 data_size_soft_quota: 1224065679360 } SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.166043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.166072Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.166079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.166087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.166095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.166100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.166111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.166127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.166270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.166337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.183296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.183325Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.191266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.191312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.191375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.193095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.193309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.193421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.193511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.194529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.194577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.194861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.194873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-10-24T16:20:30.194907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.194917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.194924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.194953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.196611Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.221347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.221451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.221546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.221556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.221601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.221620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.222548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.222601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.222680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.222694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.222701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 
ProgressState no shards to create, do next state 2025-10-24T16:20:30.222707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.223195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.223208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.223214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.223574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.223585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.223592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.223601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.224376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.224845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.224905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.225153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.225183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.225191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.225259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.225266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.225301Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.225315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.225787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.225797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... SHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:20:32.132393Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:20:32.132424Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.132733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:20:32.132891Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2025-10-24T16:20:32.132927Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:20:32.132981Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:20:32.133044Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:20:32.133089Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.133115Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-10-24T16:20:32.133165Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2025-10-24T16:20:32.133190Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, 
at schemeshard: 72057594046678944 2025-10-24T16:20:32.133211Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:20:32.133245Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2025-10-24T16:20:32.133288Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:20:32.133308Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:20:32.133333Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:20:32.133380Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:32.133384Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.133401Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:20:32.133451Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:32.133458Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.133465Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.134222Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:20:32.134236Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-10-24T16:20:32.134251Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:20:32.134254Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:20:32.134260Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:20:32.134262Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-10-24T16:20:32.134268Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046678944:2 2025-10-24T16:20:32.134272Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-10-24T16:20:32.134297Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:20:32.134310Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:20:32.134376Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:20:32.134382Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:20:32.134428Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:20:32.134451Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.134456Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:599:2541] TestWaitNotification: OK eventTxId 103 2025-10-24T16:20:32.134512Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.134537Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2025-10-24T16:20:32.134561Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:32.134604Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.134621Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 18us 
result status StatusSuccess 2025-10-24T16:20:32.134673Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.134736Z node 6 :HIVE INFO: tablet_helpers.cpp:1586: [72057594037968897] TEvRequestHiveInfo, msg: >> IndexBuildTestReboots::UniqueIndexValidationFailsBetweenShards >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> Yq_1::DeleteConnections [GOOD] >> IndexBuildTestReboots::CancelBuild >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> Yq_1::ModifyConnections [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> Yq_1::Basic_TaggedLiteral [GOOD] |64.7%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.097625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.097650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.097655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.097662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.097669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.097673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.097683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.097697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.097812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.097877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.113624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.113649Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.116866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.116900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.116951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.118286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.118440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.118509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 
1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.118565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.119405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.119445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.119642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.119652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.119680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.119688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.119695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.119718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.120692Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.146598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.146669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.146724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.146732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.146770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.146784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.149873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.149930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.150007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.150020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.150028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.150035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.150983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.151007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.151019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.152194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.152213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.152219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.152227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.152974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.153465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.153533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.153754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.153782Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.153790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.153855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.153865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.153969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.153985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.154497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.154507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 2075186233409546 2025-10-24T16:20:34.383411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:729:2634], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-10-24T16:20:34.383416Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:729:2634], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-10-24T16:20:34.383499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-10-24T16:20:34.383505Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-10-24T16:20:34.383533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-10-24T16:20:34.383601Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-10-24T16:20:34.383609Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-10-24T16:20:34.383611Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-10-24T16:20:34.383615Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-10-24T16:20:34.383618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-10-24T16:20:34.383654Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-10-24T16:20:34.383660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-10-24T16:20:34.383662Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-10-24T16:20:34.383664Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-10-24T16:20:34.383668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-10-24T16:20:34.383676Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-10-24T16:20:34.384051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-10-24T16:20:34.384070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-10-24T16:20:34.384073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-10-24T16:20:34.384131Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-10-24T16:20:34.384155Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, 
OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-10-24T16:20:34.384266Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6287: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-10-24T16:20:34.384272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-10-24T16:20:34.384284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-10-24T16:20:34.384287Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-10-24T16:20:34.384292Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-10-24T16:20:34.384302Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 116:0 2 -> 3 2025-10-24T16:20:34.384437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-10-24T16:20:34.384449Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-10-24T16:20:34.384808Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-10-24T16:20:34.384847Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-10-24T16:20:34.384851Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:216: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-10-24T16:20:34.384858Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:236: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-10-24T16:20:34.384903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:252: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 677 RawX2: 30064773665 } TxBody: "\n\342\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\366\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001b\031\010\000J\025\n\005\n\003hdd\022\005\n\003hdd\032\005\n\003hdd\212\001&\010\000\022\004\010\001\020\000\022\004\010\002\020\001\022\004\010\003\020\001\032\004\010\001\020\000\032\004\010\002\020\001\032\004\010\003\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-10-24T16:20:34.385391Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2025-10-24T16:20:34.385412Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-10-24T16:20:34.388483Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-10-24T16:20:34.388815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." 
TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-10-24T16:20:34.388846Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 |64.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> Yq_1::ModifyQuery >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.152643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.152666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.152672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.152679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.152685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.152690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.152698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.152712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.152826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.152889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.169218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.169239Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.172449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.172482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.172535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.173855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.174009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.174108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.174178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.175061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.175101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.175305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.175317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.175347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.175354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.175360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.175386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.176668Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.200612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.200672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.200720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-10-24T16:20:30.200727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.200765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.200778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.201302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.201337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.201383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.201393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.201398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.201404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.201832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.201845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.201850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.202227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.202239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.202244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.202251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.202926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-10-24T16:20:30.203330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.203380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.203579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.203605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.203612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.203668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.203676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.203712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.203724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.204184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.204193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
108 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000005 2025-10-24T16:20:31.820876Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:31.820891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 30064773231 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:31.820897Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-10-24T16:20:31.820934Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 108:0 128 -> 240 2025-10-24T16:20:31.820942Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-10-24T16:20:31.820963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:20:31.820972Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:20:31.820981Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:425: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-10-24T16:20:31.821087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-10-24T16:20:31.821372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2025-10-24T16:20:31.821664Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:31.821671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:31.821701Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:20:31.821717Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:31.821722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:212:2213], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-10-24T16:20:31.821728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:212:2213], at 
schemeshard: 72057594046678944, txId: 108, path id: 5 2025-10-24T16:20:31.821766Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:20:31.821773Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2025-10-24T16:20:31.821789Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#108:0 progress is 1/1 2025-10-24T16:20:31.821794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:20:31.821800Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#108:0 progress is 1/1 2025-10-24T16:20:31.821803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:20:31.821807Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2025-10-24T16:20:31.821810Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:20:31.821814Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 108:0 2025-10-24T16:20:31.821817Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 108:0 2025-10-24T16:20:31.821825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:20:31.821829Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2025-10-24T16:20:31.821832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-10-24T16:20:31.821834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-10-24T16:20:31.821967Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:20:31.821978Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:20:31.821981Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2025-10-24T16:20:31.821984Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-10-24T16:20:31.821987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 
2025-10-24T16:20:31.822090Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:20:31.822097Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:20:31.822099Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-10-24T16:20:31.822102Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-10-24T16:20:31.822107Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:20:31.822114Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-10-24T16:20:31.822732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-10-24T16:20:31.822967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-10-24T16:20:31.823022Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-10-24T16:20:31.823030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-10-24T16:20:31.823094Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-10-24T16:20:31.823112Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-10-24T16:20:31.823117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:488:2458] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-10-24T16:20:31.823607Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "extSubdomain" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:31.823632Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/MyRoot/extSubdomain, opId: 109:0, at schemeshard: 72057594046678944 2025-10-24T16:20:31.823649Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 
72057594046678944, LocalPathId: 5], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:101, at schemeshard: 72057594046678944 2025-10-24T16:20:31.824038Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/extSubdomain\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:101" TxId: 109 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 108, at schemeshard: 72057594046678944 2025-10-24T16:20:31.824064Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:101, operation: CREATE DATABASE, path: /MyRoot/extSubdomain TestModificationResult got TxId: 109, wait until txId: 109 >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.370423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.370449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.370455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.370464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.370473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.370477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.370488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.370505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.370645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.370720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.387645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.387667Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.391108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.391146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.391209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.392767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.392925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.393031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.393114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.394069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.394117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.394358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.394372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.394404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.394413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.394420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.394446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.395808Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.419625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.419717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.419781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.419790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.419836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.419867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.420636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.420684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.420740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.420751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.420758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.420765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.421219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.421230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.421236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.421609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.421621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.421628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState 
leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.421636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.422295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.422658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.422705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.422863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.422884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.422890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.422941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.422946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.422981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.422994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.423421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.423428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-10-24T16:20:32.004574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-10-24T16:20:32.005253Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:20:32.005273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:20:32.005279Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:20:32.005283Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:20:32.005288Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:20:32.005409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.005418Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:20:32.005431Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:20:32.005436Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.005441Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:20:32.005445Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.005450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-10-24T16:20:32.005455Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:20:32.005461Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:20:32.005465Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:20:32.005510Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.005668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:20:32.005861Z node 7 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2025-10-24T16:20:32.005890Z node 7 :HIVE 
INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:20:32.005909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:20:32.005969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2025-10-24T16:20:32.006161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.006184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:20:32.006305Z node 7 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2025-10-24T16:20:32.006333Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:20:32.006351Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:20:32.006435Z node 7 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2025-10-24T16:20:32.006475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:20:32.006492Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:20:32.006523Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:20:32.006564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:32.006568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.006584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:20:32.006609Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:32.006613Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.006620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.007524Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:20:32.007537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-10-24T16:20:32.007549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:20:32.007551Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:20:32.007558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:20:32.007560Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-10-24T16:20:32.007566Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:20:32.007570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-10-24T16:20:32.007579Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:20:32.007605Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:20:32.007654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:20:32.007659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:20:32.007704Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:20:32.007716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.007720Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:582:2524] TestWaitNotification: OK eventTxId 103 2025-10-24T16:20:32.007781Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.007810Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 46us result status StatusPathDoesNotExist 2025-10-24T16:20:32.007839Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpcc/ydb-tests-functional-tpcc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-10-24T16:20:29.966398Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823951176866793:2147];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:29.966486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:30.012491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions E1024 16:20:30.032143479 9140 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:30.032214509 9140 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:30.035514Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12647 2025-10-24T16:20:30.035874Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.040928Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.041726Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.041818Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.041835Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.041911Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.041925Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042010Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042063Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042131Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042275Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042290Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042332Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042339Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.042392Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.054813Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.055237Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.076065Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.077147Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.078998Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.080717Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.080736Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.085420Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.085657Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.085682Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.085759Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.085760Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10-24T16:20:30.088748Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:12647: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12647 } ] 2025-10- ... Set execution timeout 299.985090s 2025-10-24T16:20:33.970024Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1452: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Create sink for output 0 { Sink { Type: "KqpTableSink" Settings { type_url: "type.googleapis.com/NKikimrKqp.TKqpTableSinkSettings" value: "\032\035\n\rRoot/yq/nodes\020\200\202\224\204\200\200\200\200\001\030\013(\001\"\r\n\006tenant\020\001 \201 \"\r\n\007node_id\020\003 \002*\024\n\016active_workers\020\005 \004*\022\n\013data_center\020\013 \201 *\017\n\texpire_at\020\010 2*\017\n\010hostname\020\004 \201 *\022\n\013instance_id\020\002 \201 *\027\n\021interconnect_port\020\t \002*\026\n\020memory_allocated\020\007 \004*\022\n\014memory_limit\020\006 \004*\023\n\014node_address\020\n \201 *\r\n\007node_id\020\003 \002*\r\n\006tenant\020\001 \201 0\232\247\200\200\200\200@8\007@\000H\001R\022\t\207\341\207_Q\247\373h\021M\t\000\000\007\000\020\000X\000`\000h\004h\nh\007h\003h\002h\010h\006h\005h\th\001h\000r\022P\320\260\226\270\2413X\377\377\377\377\377\377\377\377\377\001x\000\262\001\000" } } } 2025-10-24T16:20:33.970082Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-10-24T16:20:33.970091Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2025-10-24T16:20:33.970106Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:389: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-10-24T16:20:33.970122Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4067: TxId: 281474976715675, task: 1. Add data: 101 / 101 2025-10-24T16:20:33.970134Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4036: TxId: 281474976715675, task: 1. Send data=101, closed=1, bufferActorId=[7:7564823967035810183:2381] 2025-10-24T16:20:33.970142Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:403: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 101 2025-10-24T16:20:33.970146Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715675, task: 1. Tasks execution finished 2025-10-24T16:20:33.970147Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1588: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2025-10-24T16:20:33.970151Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-10-24T16:20:33.970153Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2025-10-24T16:20:33.970156Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715675, task: 1. Tasks execution finished 2025-10-24T16:20:33.970157Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1588: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2025-10-24T16:20:33.970163Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:33.970170Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715675, task: 1. Tasks execution finished 2025-10-24T16:20:33.970172Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1588: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Waiting finish of sink[0] 2025-10-24T16:20:33.970183Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2307: SelfId: [7:7564823967035810183:2381], SessionActorId: [7:7564823967035808985:2381], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:11:1]). lockId=281474976715674. ActorId=[7:7564823967035810190:2381] 2025-10-24T16:20:33.970194Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:447: Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]Open: token=0 2025-10-24T16:20:33.970203Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2497: SelfId: [7:7564823967035810183:2381], SessionActorId: [7:7564823967035808985:2381], ProcessRequestQueue [OwnerId: 72057594046644480, LocalPathId: 11] NOT READY queue=1 2025-10-24T16:20:33.970217Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:456: SelfId: [7:7564823967035810190:2381], Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]Write: token=0 2025-10-24T16:20:33.970438Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:466: SelfId: [7:7564823967035810190:2381], Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]Close: token=0 2025-10-24T16:20:33.970452Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3931: SelfId: [7:7564823967035810189:2381], TxId: 281474976715675, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7564823967035810183:2381] 2025-10-24T16:20:33.970455Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3949: SelfId: [7:7564823967035810189:2381], TxId: 281474976715675, task: 1. Finished 2025-10-24T16:20:33.970459Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:33.970467Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715675, task: 1. Tasks execution finished 2025-10-24T16:20:33.970469Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [7:7564823967035810187:2381], TxId: 281474976715675, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6128w0z2xs2wyfpdsd8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=MjQ1MjEyYTEtZThlNTg1ZDgtYWFjMTM0YmYtZmEyOTQ5Yzc=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:33.970483Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715675, task: 1. pass away 2025-10-24T16:20:33.970503Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715675;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:33.970868Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2666: SelfId: [7:7564823967035810183:2381], SessionActorId: [7:7564823967035808985:2381], Start immediate commit 2025-10-24T16:20:33.970879Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1032: SelfId: [7:7564823967035810190:2381], Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]SetImmediateCommit 2025-10-24T16:20:33.970881Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2564: SelfId: [7:7564823967035810183:2381], SessionActorId: [7:7564823967035808985:2381], Flush data 2025-10-24T16:20:33.970915Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1183: SelfId: [7:7564823967035810190:2381], Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]Send EvWrite to ShardID=72075186224037889, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715674 DataShard: 72075186224037889 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 11, Size=212, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=212 2025-10-24T16:20:33.972338Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:708: SelfId: [7:7564823967035810190:2381], Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]Recv EvWriteResult from ShardID=72075186224037889, Status=STATUS_COMPLETED, TxId=3, Locks= , Cookie=1 2025-10-24T16:20:33.972346Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:978: SelfId: [7:7564823967035810190:2381], Table: `Root/yq/nodes` ([72057594046644480:11:1]), SessionActorId: [7:7564823967035808985:2381]Got completed result TxId=3, TabletId=72075186224037889, Cookie=1, Mode=3, Locks= 2025-10-24T16:20:33.972353Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3628: SelfId: [7:7564823967035810183:2381], SessionActorId: [7:7564823967035808985:2381], Committed TxId=0 |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> Yq_1::Basic_EmptyDict >> KqpQueryService::CloseSessionsWithLoad >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> Yq_1::DescribeQuery [GOOD] |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:33.207355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.207372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.207376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.207380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.207385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.207387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.207398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.207407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.207475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.207511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.217041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:33.217055Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.219103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.219122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.219150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.220102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.220150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.220213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.220259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.221087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.221123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.221319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-10-24T16:20:33.221327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.221350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.221357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.221364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.221386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.222500Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.236484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.236536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.236578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.236584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.236620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.236630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.237208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.237234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.237264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.237270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet# 72057594046678944 2025-10-24T16:20:33.237274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.237277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.237541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.237548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.237551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.237787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.237792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.237796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.237800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.238195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.238496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.238537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.238677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.238694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.238699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.238739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:33.238743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.238764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:33.238772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:33.239083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.239087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... t reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-10-24T16:20:33.389417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:20:33.389970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-10-24T16:20:33.390022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409551 2025-10-24T16:20:33.390279Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 Forgetting tablet 72075186234409550 2025-10-24T16:20:33.390536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-10-24T16:20:33.390567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:20:33.390633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-10-24T16:20:33.390694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:33.390700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:20:33.390721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:20:33.390772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:33.390778Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:20:33.390787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:20:33.391273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-10-24T16:20:33.391285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549 2025-10-24T16:20:33.391294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-10-24T16:20:33.391297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2025-10-24T16:20:33.391561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-10-24T16:20:33.391574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2025-10-24T16:20:33.391607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-10-24T16:20:33.391667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-10-24T16:20:33.391671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-10-24T16:20:33.391715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:20:33.391732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:936:2801] TestWaitNotification: OK eventTxId 106 2025-10-24T16:20:33.391788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391816Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 36us result status StatusPathDoesNotExist 2025-10-24T16:20:33.391863Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391918Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391932Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 16us result status StatusPathDoesNotExist 2025-10-24T16:20:33.391944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391977Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.391989Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2025-10-24T16:20:33.392047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2025-10-24T16:20:33.392132Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2025-10-24T16:20:33.392145Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2025-10-24T16:20:33.392152Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2025-10-24T16:20:33.392160Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.4%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:32.829692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:32.829710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.829714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:32.829719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:32.829724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:32.829727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:32.829738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.829749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:32.829834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:32.829874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:32.840321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:32.840338Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:32.842864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:32.842893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:32.842922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:32.844219Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:32.844380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:32.844452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.844507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:32.845434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.845472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:32.845676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.845685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.845702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:32.845707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:32.845711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:32.845733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.846934Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:32.870390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.870445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.870490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:32.870499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:32.870583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:32.870596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:32.871145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.871183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:32.871225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.871233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:32.871239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:32.871245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:32.871631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.871644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:32.871651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:32.872051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.872062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.872067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.872075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.872788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:32.873252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:32.873295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:32.873477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.873513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.873523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.873579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:32.873587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.873614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.873627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:32.874039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.874048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
4 2025-10-24T16:20:32.917883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-10-24T16:20:32.917891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-10-24T16:20:32.917904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-10-24T16:20:32.917919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:578: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:610:2540], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-10-24T16:20:32.918373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.918381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:20:32.918406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.918411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-10-24T16:20:32.918449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.918454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:761: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-10-24T16:20:32.918457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 240 -> 240 2025-10-24T16:20:32.918535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:20:32.918543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:20:32.918546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:20:32.918549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-10-24T16:20:32.918553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-10-24T16:20:32.918562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-10-24T16:20:32.918931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.918940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-10-24T16:20:32.918948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:20:32.918951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:20:32.918954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:20:32.918956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:20:32.918959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-10-24T16:20:32.918963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:20:32.918966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:20:32.918969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:20:32.918994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-10-24T16:20:32.919078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-10-24T16:20:32.919307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:20:32.919312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:20:32.919354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:20:32.919364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.919367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:759:2645] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-10-24T16:20:32.919956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.919975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1079: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-10-24T16:20:32.919978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1085: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-10-24T16:20:32.919998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-10-24T16:20:32.920003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-10-24T16:20:32.920351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.920386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-10-24T16:20:32.920925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.920943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1079: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-10-24T16:20:32.920946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1085: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-10-24T16:20:32.920961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: 
can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-10-24T16:20:32.920964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-10-24T16:20:32.921308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.921329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/compute/ut/unittest >> TIssuesBufferTest::TestUseAfterDump [GOOD] Test command err: 2025-10-24T16:20:04.047007Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert !!watermark failed 2025-10-24T16:20:04.047013Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 NO WATERMARK 2025-10-24T16:20:04.047572Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert !!watermark failed 2025-10-24T16:20:04.047576Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 NO WATERMARK 2025-10-24T16:20:04.048520Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert !!watermark failed 2025-10-24T16:20:04.048524Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 NO WATERMARK 2025-10-24T16:20:04.049288Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert !!watermark failed 2025-10-24T16:20:04.049292Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 NO WATERMARK 2025-10-24T16:20:04.049837Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert !!watermark failed 2025-10-24T16:20:04.049841Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 NO WATERMARK 2025-10-24T16:20:04.050951Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert !!watermark failed 2025-10-24T16:20:04.050955Z 
:KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 NO WATERMARK 2025-10-24T16:20:04.051856Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert !!watermark failed 2025-10-24T16:20:04.051861Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 NO WATERMARK 2025-10-24T16:20:04.052603Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert !!watermark failed 2025-10-24T16:20:04.052607Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 NO WATERMARK 2025-10-24T16:20:04.053684Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert !!watermark failed 2025-10-24T16:20:04.053689Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 NO WATERMARK 2025-10-24T16:20:04.054239Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=0 channels=1 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:01.000000Z == 1970-01-01T00:00:04.000000Z, Watermark Delay is 3.000000s 2025-10-24T16:20:04.055449Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert !!watermark failed 2025-10-24T16:20:04.055453Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 NO WATERMARK 2025-10-24T16:20:04.056327Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert !!watermark failed 2025-10-24T16:20:04.056331Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 NO WATERMARK 2025-10-24T16:20:04.056692Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=0 channels=1 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:01.000000Z == 1970-01-01T00:00:05.000000Z, Watermark Delay is 4.000000s 2025-10-24T16:20:04.057791Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert !!watermark failed 2025-10-24T16:20:04.057795Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 NO WATERMARK 2025-10-24T16:20:04.058717Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert !!watermark failed 2025-10-24T16:20:04.058721Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 NO WATERMARK 2025-10-24T16:20:04.059480Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=51 watermarkPeriod=1 waitIntermediateAcks=0 channels=1 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:34.000000Z 
== 1970-01-01T00:00:51.000000Z, Watermark Delay is 17.000000s 2025-10-24T16:20:04.061383Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=51 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:48.000000Z == 1970-01-01T00:00:51.000000Z, Watermark Delay is 3.000000s 2025-10-24T16:20:04.062822Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=51 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:42.000000Z == 1970-01-01T00:00:51.000000Z, Watermark Delay is 9.000000s 2025-10-24T16:20:04.065514Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=128 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:04.000000Z == 1970-01-01T00:02:08.000000Z, Watermark Delay is 4.000000s 2025-10-24T16:20:04.067158Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=128 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:01:45.000000Z == 1970-01-01T00:02:08.000000Z, Watermark Delay is 23.000000s 2025-10-24T16:20:04.069233Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=128 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:06.000000Z == 1970-01-01T00:02:08.000000Z, Watermark Delay is 2.000000s 2025-10-24T16:20:04.074078Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=251 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:14.000000Z == 1970-01-01T00:04:11.000000Z, Watermark Delay is 117.000000s 2025-10-24T16:20:04.076604Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=251 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:03:46.000000Z == 1970-01-01T00:04:11.000000Z, Watermark Delay is 25.000000s 2025-10-24T16:20:04.080277Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=0 channels=1 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:03.000000Z == 1970-01-01T00:00:07.000000Z, Watermark Delay is 4.000000s 2025-10-24T16:20:04.080692Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert !!watermark failed 2025-10-24T16:20:04.080696Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 NO WATERMARK 2025-10-24T16:20:04.081320Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert !!watermark failed 2025-10-24T16:20:04.081324Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 NO WATERMARK 2025-10-24T16:20:04.082149Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:820: Square Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert !!watermark failed 2025-10-24T16:20:04.082153Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:826: Square Test for: packets=7 
watermarkPeriod=1 waitIntermediateAcks=0 channels=16 NO WATERMARK 2025-10-24T16:20:04.083371Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=38 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:32.000000Z == 1970-01-01T00:00:38.000000Z, Watermark Delay is 6.000000s 2025-10-24T16:20:04.095644Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=536 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:03:27.000000Z == 1970-01-01T00:08:56.000000Z, Watermark Delay is 329.000000s 2025-10-24T16:20:04.106204Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=536 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:08:44.000000Z == 1970-01-01T00:08:56.000000Z, Watermark Delay is 12.000000s 2025-10-24T16:20:04.109813Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=173 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:12.000000Z == 1970-01-01T00:02:53.000000Z, Watermark Delay is 41.000000s 2025-10-24T16:20:04.114546Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=173 watermarkPeriod=1 waitIntermediateAcks=0 channels=16 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:10.000000Z == 1970-01-01T00:02:53.000000Z, Watermark Delay is 43.000000s 2025-10-24T16:20:04.117477Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=136 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:01:52.000000Z == 1970-01-01T00:02:16.000000Z, Watermark Delay is 24.000000s 2025-10-24T16:20:04.119238Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=136 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:12.000000Z == 1970-01-01T00:02:16.000000Z, Watermark Delay is 4.000000s 2025-10-24T16:20:04.127009Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=294 watermarkPeriod=1 waitIntermediateAcks=0 channels=3 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:03:03.000000Z == 1970-01-01T00:04:54.000000Z, Watermark Delay is 111.000000s 2025-10-24T16:20:04.130224Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:823: Square Test for: packets=294 watermarkPeriod=1 waitIntermediateAcks=0 channels=7 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:04:53.000000Z == 1970-01-01T00:04:54.000000Z, Watermark Delay is 1.00000 ... 
25-10-24T16:20:23.825097Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=5 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 NO WATERMARK 2025-10-24T16:20:23.865410Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=128 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:01:57.000000Z == 1970-01-01T00:02:06.000000Z, Watermark Delay is 9.000000s 2025-10-24T16:20:23.962984Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=251 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:03:54.000000Z == 1970-01-01T00:04:09.000000Z, Watermark Delay is 15.000000s 2025-10-24T16:20:23.975479Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=7 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert !!watermark failed 2025-10-24T16:20:23.975483Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=7 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 NO WATERMARK 2025-10-24T16:20:24.284966Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=536 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:08:42.000000Z == 1970-01-01T00:08:54.000000Z, Watermark Delay is 12.000000s 2025-10-24T16:20:24.538183Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=294 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:04:51.000000Z == 1970-01-01T00:04:54.000000Z, Watermark Delay is 3.000000s 2025-10-24T16:20:24.701739Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=656 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:10:39.000000Z == 1970-01-01T00:10:54.000000Z, Watermark Delay is 15.000000s 2025-10-24T16:20:24.901500Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=468 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:15.000000Z == 1970-01-01T00:07:48.000000Z, Watermark Delay is 333.000000s 2025-10-24T16:20:25.248277Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=556 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:04:54.000000Z == 1970-01-01T00:09:15.000000Z, Watermark Delay is 261.000000s 2025-10-24T16:20:25.766849Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=567 watermarkPeriod=3 waitIntermediateAcks=0 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:06:12.000000Z == 1970-01-01T00:09:27.000000Z, Watermark Delay is 195.000000s 2025-10-24T16:20:27.367819Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:27.367828Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=1 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:27.369706Z :KQP_COMPUTE ERROR: 
dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:27.369718Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=2 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:27.371946Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:27.371951Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=3 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:27.373861Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:27.373867Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=4 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:27.376594Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:27.376600Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=5 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:27.410471Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=128 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:01.000000Z == 1970-01-01T00:02:08.000000Z, Watermark Delay is 7.000000s 2025-10-24T16:20:27.491154Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:27.491160Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=7 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:27.505296Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=38 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:33.000000Z == 1970-01-01T00:00:38.000000Z, Watermark Delay is 5.000000s 2025-10-24T16:20:27.612435Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=536 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:08:48.000000Z == 1970-01-01T00:08:56.000000Z, Watermark Delay is 8.000000s 2025-10-24T16:20:27.900661Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=468 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:07:47.000000Z == 1970-01-01T00:07:48.000000Z, Watermark Delay is 1.000000s 2025-10-24T16:20:27.966849Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=434 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:07:13.000000Z == 1970-01-01T00:07:14.000000Z, Watermark Delay is 1.000000s 2025-10-24T16:20:28.254890Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: 
packets=343 watermarkPeriod=1 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:05:35.000000Z == 1970-01-01T00:05:43.000000Z, Watermark Delay is 8.000000s 2025-10-24T16:20:28.378127Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=3 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:28.378132Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=3 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:28.379302Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=4 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:28.379306Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=4 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:28.380546Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=5 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:28.380551Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=5 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:28.396552Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=128 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:01:51.000000Z == 1970-01-01T00:02:06.000000Z, Watermark Delay is 15.000000s 2025-10-24T16:20:28.418729Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=251 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:03:24.000000Z == 1970-01-01T00:04:09.000000Z, Watermark Delay is 45.000000s 2025-10-24T16:20:28.420351Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:985: InputTransform Test for: packets=7 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert !!watermark failed 2025-10-24T16:20:28.420357Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:991: InputTransform Test for: packets=7 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 NO WATERMARK 2025-10-24T16:20:28.471229Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=536 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:08:51.000000Z == 1970-01-01T00:08:54.000000Z, Watermark Delay is 3.000000s 2025-10-24T16:20:28.485831Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=173 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:02:30.000000Z == 1970-01-01T00:02:51.000000Z, Watermark Delay is 21.000000s 2025-10-24T16:20:28.497447Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=136 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:01:51.000000Z == 1970-01-01T00:02:15.000000Z, Watermark Delay is 24.000000s 2025-10-24T16:20:28.681690Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=434 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 
1970-01-01T00:06:21.000000Z == 1970-01-01T00:07:12.000000Z, Watermark Delay is 51.000000s 2025-10-24T16:20:28.702840Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=248 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:04:00.000000Z == 1970-01-01T00:04:06.000000Z, Watermark Delay is 6.000000s 2025-10-24T16:20:28.853301Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=343 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:04:54.000000Z == 1970-01-01T00:05:42.000000Z, Watermark Delay is 48.000000s 2025-10-24T16:20:28.861731Z :KQP_COMPUTE ERROR: dq_async_compute_actor_ut.cpp:988: InputTransform Test for: packets=60 watermarkPeriod=3 waitIntermediateAcks=1 channels=11 Assert *watermark == expectedWatermark failed Expected 1970-01-01T00:00:54.000000Z == 1970-01-01T00:01:00.000000Z, Watermark Delay is 6.000000s 2025-10-24T16:20:29.004346Z :KQP_COMPUTE ERROR: dq_compute_actor_channels.cpp:133: TxId: TxId, task: 0. Unexpected input channelId: 0 seqNo: 0, expected: 1 2025-10-24T16:20:29.005683Z :KQP_COMPUTE ERROR: dq_compute_actor_channels.cpp:133: TxId: TxId, task: 0. Unexpected input channelId: 0 seqNo: 0, expected: 1 |64.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-10-24T16:20:30.133763Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823956387744891:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:30.133784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1024 16:20:30.168829360 9684 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:30.168871230 9684 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:30.175951Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.179987Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24269 2025-10-24T16:20:30.187897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:30.191109Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.194036Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.196256Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.196279Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.196287Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.196293Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.196999Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197030Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197036Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197094Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197103Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197103Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197114Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.197465Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.210371Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.217053Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.217089Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.217458Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.218154Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.218261Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.218325Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.218333Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.218346Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.218902Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.219656Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.219668Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-24T16:20:30.219674Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24269: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24269 } ] 2025-10-2 ... . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7564823977416807059:2716] 2025-10-24T16:20:35.474018Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-10-24T16:20:35.474040Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823977416807059 RawX2: 4503616807242396 } } DstEndpoint { ActorId { RawX1: 7564823977416807060 RawX2: 4503616807242397 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823977416807060 RawX2: 4503616807242397 } } DstEndpoint { ActorId { RawX1: 7564823977416807055 RawX2: 4503616807242155 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-10-24T16:20:35.474050Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.474162Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. Recv TEvReadResult from ShardID=72075186224037895, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:35.474172Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. Taken 0 locks 2025-10-24T16:20:35.474174Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:35.474178Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-10-24T16:20:35.474181Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.474184Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:35.474186Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. enter pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:35.474194Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. exit pack cells method shardId: 72075186224037895 processedRows: 0 packed rows: 1 freeSpace: 8386358 2025-10-24T16:20:35.474196Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. returned 1 rows; processed 1 rows 2025-10-24T16:20:35.474206Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. dropping batch for read #0 2025-10-24T16:20:35.474211Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:35.474212Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:35.474213Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715715, task: 1, CA Id [4:7564823977416807059:2716]. returned async data processed rows 1 left freeSpace 8386358 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:35.474260Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:35.474262Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-10-24T16:20:35.474265Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.474270Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715715, task: 2. 
Finish input channelId: 1, from: [4:7564823977416807059:2716] 2025-10-24T16:20:35.474272Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715715, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-10-24T16:20:35.474274Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:35.474275Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.474278Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.474279Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715715, task: 1. Tasks execution finished 2025-10-24T16:20:35.474281Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823977416807059:2716], TxId: 281474976715715, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:35.474298Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715715, task: 1. pass away 2025-10-24T16:20:35.474314Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:35.474317Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715715;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:35.474354Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.474362Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715715, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:35.474363Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715715, task: 2. Tasks execution finished 2025-10-24T16:20:35.474364Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823977416807060:2717], TxId: 281474976715715, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7ff25ptc481jmyrx9vt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZGE4OWY0NDktM2E3ZmQyYS0yMzc2NDU0NC1lNjc2ZGI5Mw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:35.474373Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715715, task: 2. pass away 2025-10-24T16:20:35.474384Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715715;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> Yq_1::DeleteQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.439788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.439807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.439811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.439816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.439821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.439824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.439831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.439843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.439991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.440064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.452444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.452462Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.455044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.455077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.455133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.456546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.456699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.456810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.456870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.457807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.457848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.458045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.458057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.458087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.458095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.458102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.458126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.459601Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.479732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.479807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.479883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.479890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.479929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.479946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2025-10-24T16:20:30.480706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.480760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.480820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.480844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.480851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.480857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.481376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.481404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.481413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.481880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.481893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.481900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.481921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.482634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.483018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.483072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.483288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-10-24T16:20:30.483319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.483326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.483391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.483398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.483439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.483449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.483910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.483917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 696Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 134 2025-10-24T16:20:32.630818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:20:32.631040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:20:32.631200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.631205Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:32.631221Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 134 -> 135 2025-10-24T16:20:32.631236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.631242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:20:32.631486Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.631491Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:32.631506Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:20:32.631522Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.631526Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-10-24T16:20:32.631529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:20:32.631555Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.631560Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 102:0 ProgressState 2025-10-24T16:20:32.631563Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 135 -> 240 2025-10-24T16:20:32.631638Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.631646Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.631649Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:20:32.631654Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:20:32.631657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:20:32.631723Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.631731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.631733Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:20:32.631736Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:20:32.631738Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:20:32.631744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:20:32.632295Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.632308Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:20:32.632322Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:20:32.632327Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.632332Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:20:32.632336Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.632344Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:20:32.632348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:20:32.632351Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:20:32.632353Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:20:32.632361Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:20:32.632392Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:32.632397Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.632407Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:20:32.632469Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:20:32.632501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:20:32.632509Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.632687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:20:32.632744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:20:32.633124Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:20:32.633139Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:20:32.633177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:20:32.633182Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:20:32.633232Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:20:32.633244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.633248Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:345:2335] TestWaitNotification: OK eventTxId 102 2025-10-24T16:20:32.633301Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.633321Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusPathDoesNotExist 2025-10-24T16:20:32.633349Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-10-24T16:20:29.980492Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823950147164203:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:29.980518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:29.994732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E1024 16:20:30.033346433 9160 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:30.033405182 9160 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:30.035075Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23133 2025-10-24T16:20:30.036192Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042325Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042684Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042695Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042722Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042902Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042908Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042916Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042935Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.042988Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.043082Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.043109Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.043153Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.043808Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.043823Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.053702Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.054047Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.054245Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.055477Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.055496Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.055578Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.058196Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.059790Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.059972Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.062132Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.062697Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.062717Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.062932Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:23133: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23133 } ] 2025-10-24T16:20:30.0 ... 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:644: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Processing resolved ShardId# 72075186224037897, partition range: [(String : yandexcloud://some_folder_id, String : utqudu2o92gvsnlm54es) ; ()), i: 0, state ranges: 0, points: 1 2025-10-24T16:20:34.885044Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:682: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Add point to new shardId: 72075186224037897 2025-10-24T16:20:34.885051Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:731: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Pending shards States: TShardState{ TabletId: 72075186224037897, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudu2o92gvsnlm54es)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudu2o92gvsnlm54es)], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-10-24T16:20:34.885053Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:34.885054Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:471: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. BEFORE: 1.0 2025-10-24T16:20:34.885062Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:902: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Send EvRead to shardId: 72075186224037897, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-10-24T16:20:34.885070Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:485: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. AFTER: 0.1 2025-10-24T16:20:34.885075Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-10-24T16:20:34.885245Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:34.885251Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Taken 0 locks 2025-10-24T16:20:34.885252Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:34.885256Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-10-24T16:20:34.885259Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:34.885262Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:34.885264Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:34.885271Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8388572 2025-10-24T16:20:34.885274Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. returned 1 rows; processed 1 rows 2025-10-24T16:20:34.885285Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. dropping batch for read #0 2025-10-24T16:20:34.885289Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:34.885290Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:34.885292Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715711, task: 1, CA Id [4:7564823972937101516:2705]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:34.885327Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:34.885332Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101517:2706], TxId: 281474976715711, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646923 2025-10-24T16:20:34.885333Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:34.885338Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715711, task: 1. 
Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-10-24T16:20:34.885345Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715711, task: 2. Finish input channelId: 1, from: [4:7564823972937101516:2705] 2025-10-24T16:20:34.885353Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:34.885355Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101517:2706], TxId: 281474976715711, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:34.885360Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:34.885363Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715711, task: 1. Tasks execution finished 2025-10-24T16:20:34.885365Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823972937101516:2705], TxId: 281474976715711, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:34.885384Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715711, task: 1. pass away 2025-10-24T16:20:34.885402Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823972937101517:2706], TxId: 281474976715711, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:34.885410Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715711;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:34.885457Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823972937101517:2706], TxId: 281474976715711, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:34.885467Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715711, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:34.885468Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715711, task: 2. Tasks execution finished 2025-10-24T16:20:34.885470Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823972937101517:2706], TxId: 281474976715711, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb6y3chg8zbqrb0z4e413. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTU3ZWY3YmUtOTZkNDY2MTEtMTZlNjA0ZWYtNGM2ZTEzNmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:34.885486Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715711, task: 2. pass away 2025-10-24T16:20:34.885515Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715711;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.350874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.350902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.350909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.350917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.350924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.350929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.350940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.350956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.351082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.351148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.370245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.370264Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.375607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.375640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.375687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.377812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2025-10-24T16:20:30.377994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.378097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.378170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.378967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.378997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.379286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.379295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.379316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.379321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.379326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.379341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.380248Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.396946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.397015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.397067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.397072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.397106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.397117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.399085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.399140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.399210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.399227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.399234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.399241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.399769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.399784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.399792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.400235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.400250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.400256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.400263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.401038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.401534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.401590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.401807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.401836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.401845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.401911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.401920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.401962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.401975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.402438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.402449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
57594046678944 2025-10-24T16:20:32.107222Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:32.107226Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:32.107618Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.107632Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.107638Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.107644Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.107671Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:32.107989Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:32.108014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:32.108146Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.108168Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 34359740528 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.108174Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.108231Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:32.108236Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.108255Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.108263Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:32.108594Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.108600Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:32.108626Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.108630Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:20:32.108678Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.108685Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:20:32.108697Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:20:32.108700Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.108704Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:20:32.108706Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.108709Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-10-24T16:20:32.108713Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.108716Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:20:32.108718Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:20:32.108726Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:20:32.108730Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:20:32.108733Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-10-24T16:20:32.108828Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:20:32.108836Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-10-24T16:20:32.108840Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:20:32.108843Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:20:32.108846Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.108855Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:20:32.109255Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:20:32.109314Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-10-24T16:20:32.110049Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.110075Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1079: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-10-24T16:20:32.110081Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1085: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-10-24T16:20:32.110104Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1102, at schemeshard: 72057594046678944 2025-10-24T16:20:32.110110Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1102, at schemeshard: 72057594046678944 2025-10-24T16:20:32.110167Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [8:274:2263] Bootstrap 2025-10-24T16:20:32.110376Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [8:274:2263] Become StateWork (SchemeCache [8:279:2268]) 2025-10-24T16:20:32.110594Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [8:274:2263] HANDLE TEvClientConnected success 
connect from tablet# 72057594046447617 2025-10-24T16:20:32.111030Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1102" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.111066Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1102, operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-10-24T16:20:32.111130Z node 8 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:33.260748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.260769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.260774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.260780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.260786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.260791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.260802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.260814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.260915Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.260963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.272960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:33.272976Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.275232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.275253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.275282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.276455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.276565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.276633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.276687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.277315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.277340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.277482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.277488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.277519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.277524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.277529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.277546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.278492Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.292925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:20:33.292974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.293016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.293023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.293096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.293106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.293625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.293651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.293679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.293685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.293689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.293692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.293993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.294001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.294005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.294345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.294357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.294361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.294366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.294847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.295196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.295229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.295346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.295362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.295368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.295412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:33.295417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.295436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:33.295443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:33.295883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.295892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
Complete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:20:33.361538Z node 1 :HIVE INFO: tablet_helpers.cpp:1635: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-10-24T16:20:33.361546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6414: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-10-24T16:20:33.361549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-10-24T16:20:33.361557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-10-24T16:20:33.361563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:796: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-10-24T16:20:33.361567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 106:0 138 -> 240 2025-10-24T16:20:33.361790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.361806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.361810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2025-10-24T16:20:33.361817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#106:0 progress is 1/1 2025-10-24T16:20:33.361820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-10-24T16:20:33.361823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#106:0 progress is 1/1 2025-10-24T16:20:33.361825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-10-24T16:20:33.361828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-10-24T16:20:33.361831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-10-24T16:20:33.361834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 106:0 2025-10-24T16:20:33.361837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 106:0 2025-10-24T16:20:33.361842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-10-24T16:20:33.362089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-10-24T16:20:33.362093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-10-24T16:20:33.362137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-10-24T16:20:33.362147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:20:33.362150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:845:2731] TestWaitNotification: OK eventTxId 106 2025-10-24T16:20:33.362204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.362221Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 21us result status StatusSuccess 2025-10-24T16:20:33.362270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.362315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-10-24T16:20:33.362324Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 10us result status StatusSuccess 2025-10-24T16:20:33.362354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-10-24T16:20:33.362394Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:33.362401Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 8us result status StatusSuccess 2025-10-24T16:20:33.362428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 
Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.362472Z node 1 :HIVE INFO: tablet_helpers.cpp:1652: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-10-24T16:20:30.088178Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823952659385289:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:30.088306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1024 16:20:30.104758861 9416 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:30.104802984 9416 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:30.107324Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:32121 2025-10-24T16:20:30.107664Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.123254Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.127747Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.130429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:30.131615Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131625Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131641Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131643Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131651Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131705Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131714Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131834Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131865Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131874Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.131969Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.132001Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.146230Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.161103Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.164549Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.164664Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.168855Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.169784Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.169798Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.169965Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.169981Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.170162Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.170331Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.170637Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 } ] 2025-10-24T16:20:30.170657Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:32121: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:32121 ... sion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823978190699743 RawX2: 4503616807242393 } } DstEndpoint { ActorId { RawX1: 7564823978190699737 RawX2: 4503616807242062 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-10-24T16:20:35.418964Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1081: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7564823978190699742:2712] 2025-10-24T16:20:35.418979Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-10-24T16:20:35.419000Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823978190699742 RawX2: 4503616807242392 } } DstEndpoint { ActorId { RawX1: 7564823978190699743 RawX2: 4503616807242393 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823978190699743 RawX2: 4503616807242393 } } DstEndpoint { ActorId { RawX1: 7564823978190699737 RawX2: 4503616807242062 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-10-24T16:20:35.419009Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.419186Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. Recv TEvReadResult from ShardID=72075186224037899, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:35.419196Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. Taken 0 locks 2025-10-24T16:20:35.419199Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. 
new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:35.419204Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699742:2712], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-10-24T16:20:35.419209Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699742:2712], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.419212Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:35.419215Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:35.419218Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:35.419220Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. returned 0 rows; processed 0 rows 2025-10-24T16:20:35.419227Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. dropping batch for read #0 2025-10-24T16:20:35.419229Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. effective maxinflight 1 sorted 1 2025-10-24T16:20:35.419232Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:35.419236Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715712, task: 1, CA Id [4:7564823978190699742:2712]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:35.419256Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823978190699742:2712], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:35.419267Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. 
PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-10-24T16:20:35.419296Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715712, task: 2. Finish input channelId: 1, from: [4:7564823978190699742:2712] 2025-10-24T16:20:35.419314Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.419316Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699742:2712], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:35.419323Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699742:2712], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.419328Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715712, task: 1. Tasks execution finished 2025-10-24T16:20:35.419331Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823978190699742:2712], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:35.419355Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715712, task: 1. pass away 2025-10-24T16:20:35.419355Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:35.419388Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715712;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:35.419413Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.419426Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715712, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:35.419428Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715712, task: 2. Tasks execution finished 2025-10-24T16:20:35.419431Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823978190699743:2713], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7dp6fakt1vpw0s6cp1q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWM5ZTNiNjEtNjYwMDdjNjctZjZkMTU4ZjMtNGI0ZTFiNzE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:35.419448Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715712, task: 2. pass away 2025-10-24T16:20:35.419479Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715712;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-10-24T16:20:29.771731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823949699222142:2149];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:29.771758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1024 16:20:29.806216412 8660 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:29.806255677 8660 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:29.807217Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.809219Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8220 2025-10-24T16:20:29.812709Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.815584Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.815744Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.815764Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.815837Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.815980Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.816151Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.816167Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.817397Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.817416Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.818830Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.820604Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.822792Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.822805Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.832461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:29.835592Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.838943Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.838963Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.839092Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.839728Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.839744Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.839752Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.842459Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.842496Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.844495Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.844732Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.844851Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.844861Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8220: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8220 } ] 2025-10-24T16:20:29.846186Z node 1 :YQ_CONTROL_PLANE_ST ... pp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347130Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347138Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347145Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347153Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347162Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347170Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347178Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347187Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347196Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347206Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347215Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347224Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347236Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347241Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347253Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347257Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347267Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347276Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347285Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347293Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347301Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347309Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347318Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347324Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347335Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347344Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347358Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347371Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347380Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347398Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: 
SyncQuota finished with error: 2025-10-24T16:20:35.347403Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347451Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347455Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347468Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347497Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347590Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347598Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347618Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347622Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347632Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347638Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347649Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347655Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347665Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347672Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347680Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347687Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347696Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347704Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347713Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347721Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347730Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347738Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347748Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347755Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347764Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347773Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347779Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347787Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347796Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347804Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished 
with error: 2025-10-24T16:20:35.347813Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347821Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347831Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347840Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347863Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347870Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347884Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347894Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347907Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347913Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347928Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347936Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347945Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347955Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347962Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347972Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347977Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347987Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.347995Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348000Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348010Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348016Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348026Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348035Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348046Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348051Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348061Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348068Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348082Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348087Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348102Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 
2025-10-24T16:20:35.348107Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348117Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348123Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348131Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348139Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348147Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348156Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348164Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348170Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348181Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348193Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: 2025-10-24T16:20:35.348199Z node 4 :FQ_QUOTA_SERVICE ERROR: quota_manager.cpp:648: SyncQuota finished with error: >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:30.582260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:30.582284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.582290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:30.582297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:30.582303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:30.582308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:30.582321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:30.582337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:30.582465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:30.582526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:30.596656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:30.596678Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:30.599425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:30.599457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:30.599500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:30.600988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:30.601198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:30.601273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.601325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:30.603966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.604014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:30.604252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.604267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:30.604295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:30.604304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:30.604311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:30.604335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.605821Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:30.631506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:30.631587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.631647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:30.631656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:30.631700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:30.631716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:30.632444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.632512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:30.632576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.632589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:30.632595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:30.632601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:30.633077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.633090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:30.633099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:30.633533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.633545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:30.633551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.633558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:30.634326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:30.634853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:30.634900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:30.635105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:30.635134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:30.635142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.635204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:30.635213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:30.635253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:30.635266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:30.635784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:30.635794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
erId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:399:2369], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:20:32.155353Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.155360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:20:32.155389Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.155392Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:20:32.155400Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.155405Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:761: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-10-24T16:20:32.155408Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:1 240 -> 240 2025-10-24T16:20:32.155510Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.155519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:20:32.155524Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:20:32.155527Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-10-24T16:20:32.155531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-10-24T16:20:32.155541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-10-24T16:20:32.156117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.156131Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:1 ProgressState 2025-10-24T16:20:32.156146Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:1 progress is 2/2 2025-10-24T16:20:32.156150Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-10-24T16:20:32.156156Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:1 progress is 2/2 2025-10-24T16:20:32.156159Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-10-24T16:20:32.156164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-10-24T16:20:32.156169Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-10-24T16:20:32.156175Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:20:32.156179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:20:32.156201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-10-24T16:20:32.156208Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-10-24T16:20:32.156212Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:1 2025-10-24T16:20:32.156224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:20:32.156330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:20:32.156654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:20:32.156663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:20:32.156716Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:20:32.156732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:20:32.156740Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:540:2482] TestWaitNotification: OK eventTxId 102 2025-10-24T16:20:32.156808Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.156840Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 40us result status StatusSuccess 2025-10-24T16:20:32.156916Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.156970Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:32.156988Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2025-10-24T16:20:32.157048Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.157144Z node 7 :HIVE INFO: tablet_helpers.cpp:1586: [72075186233409546] TEvRequestHiveInfo, msg: extsubdomain hive tablet, type SchemeShard extsubdomain hive tablet, type Coordinator extsubdomain hive tablet, type Mediator |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest >> Yq_1::Basic_EmptyDict [GOOD] |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-10-24T16:20:30.128037Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823953177318620:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:30.128078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:30.145120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E1024 16:20:30.151900659 9666 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:30.151958719 9666 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:30.155587Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.163280Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.166291Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.166702Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.166720Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.166963Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.166993Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.167018Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.167087Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.167126Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.167926Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.170066Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.170104Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.170119Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.170506Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8309 2025-10-24T16:20:30.173040Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.183201Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.196112Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.200421Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.200448Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.200454Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.200477Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.200501Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.202555Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.203604Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.203628Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.203637Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.203769Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.204061Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8309: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8309 } ] 2025-10-24T16:20:30.204178Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:2 ... ointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Update input channelId: 1, peer: [4:7564823976608007260:2777] 2025-10-24T16:20:35.875729Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646926 2025-10-24T16:20:35.875753Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823976608007260 RawX2: 4503616807242457 } } DstEndpoint { ActorId { RawX1: 7564823976608007261 RawX2: 4503616807242458 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823976608007261 RawX2: 4503616807242458 } } DstEndpoint { ActorId { RawX1: 7564823976608007256 RawX2: 4503616807242167 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-10-24T16:20:35.875764Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:35.875951Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:35.875964Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. Taken 0 locks 2025-10-24T16:20:35.875967Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:35.875972Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007260:2777], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 276037645 2025-10-24T16:20:35.875977Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007260:2777], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.875981Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:35.875985Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:35.875987Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:35.875989Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. returned 0 rows; processed 0 rows 2025-10-24T16:20:35.876003Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. dropping batch for read #0 2025-10-24T16:20:35.876006Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:35.876008Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:35.876011Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715728, task: 1, CA Id [4:7564823976608007260:2777]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:35.876037Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823976608007260:2777], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:35.876040Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646923 2025-10-24T16:20:35.876046Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715728, task: 2. 
Finish input channelId: 1, from: [4:7564823976608007260:2777] 2025-10-24T16:20:35.876053Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007260:2777], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:35.876054Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:35.876058Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007260:2777], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:35.876063Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715728, task: 1. Tasks execution finished 2025-10-24T16:20:35.876066Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823976608007260:2777], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:35.876068Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:35.876080Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:35.876087Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715728, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:35.876089Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715728, task: 2. 
Tasks execution finished 2025-10-24T16:20:35.876091Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715728, task: 1. pass away 2025-10-24T16:20:35.876092Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823976608007261:2778], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb7vr34x75bdgxeyxsx7a. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDUxNDg3OTQtNDJiNWRmNzgtNDM2OTFiNTgtMWIwNjMyYTE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:35.876106Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715728, task: 2. pass away 2025-10-24T16:20:35.876123Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715728;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:35.876123Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715728;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:35.878071Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqudu2o91v75rcse3a0" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:669: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:33.292452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.292469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.292473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.292477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.292481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.292484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.292495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.292505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.292588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.292629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.303045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to 
console configs 2025-10-24T16:20:33.303064Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.305271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.305293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.305330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.306592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.306765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.306847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.306909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.307862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.307899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.308093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.308101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.308124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.308130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.308135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.308152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.309223Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.323956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.324012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.324053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.324059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.324139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.324152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.324654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.324687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.324720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.324726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.324730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.324734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.325006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.325014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.325018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.325269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.325276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.325280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.325285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.325772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.326102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.326147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.326294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.326314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.326319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.326360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:33.326365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.326387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:33.326399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:33.326727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.326732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
eshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.370706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.370710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.370715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-10-24T16:20:33.370720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-10-24T16:20:33.370738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.371110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-10-24T16:20:33.371127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-10-24T16:20:33.371190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.371205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.371209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-10-24T16:20:33.371253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-10-24T16:20:33.371259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-10-24T16:20:33.371271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-10-24T16:20:33.371283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:578: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:610:2540], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, 
TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-10-24T16:20:33.371638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.371644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:20:33.371665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.371669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-10-24T16:20:33.371704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.371709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:761: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-10-24T16:20:33.371713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 240 -> 240 2025-10-24T16:20:33.371780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:20:33.371787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:20:33.371791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:20:33.371794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-10-24T16:20:33.371798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-10-24T16:20:33.371807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-10-24T16:20:33.372306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.372319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-10-24T16:20:33.372329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress 
is 1/1 2025-10-24T16:20:33.372332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:20:33.372336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:20:33.372338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:20:33.372341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-10-24T16:20:33.372345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:20:33.372348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:20:33.372351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:20:33.372372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-10-24T16:20:33.372489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-10-24T16:20:33.372774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:20:33.372780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:20:33.372826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:20:33.372841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:20:33.372847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:759:2645] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-10-24T16:20:33.373521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.373545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1079: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-10-24T16:20:33.373549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1085: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-10-24T16:20:33.373572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject 
Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-10-24T16:20:33.373576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-10-24T16:20:33.373946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.373976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-10-24T16:20:30.158686Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823953068650861:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:30.159671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1024 16:20:30.196125144 9772 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:30.196175384 9772 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:30.198752Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19603 2025-10-24T16:20:30.199041Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.209736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:30.210584Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.217526Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.218415Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.218894Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.220870Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.221032Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.221144Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.223357Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.224591Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.225830Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.225842Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.225849Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.226711Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.226727Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.226746Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.239454Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.249933Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.250337Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.260647Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.263095Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.263186Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.263465Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.263480Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.264168Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.265628Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.265989Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:19603 } ] 2025-10-24T16:20:30.266006Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:19603: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:196 ... =4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.428414Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 1. Tasks execution finished 2025-10-24T16:20:37.428416Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823985777784671:2776], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9ca8c2vaqe8bafmzhm8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:37.428445Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:389: SelfId: [4:7564823985777784672:2777], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9ca8c2vaqe8bafmzhm8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2025-10-24T16:20:37.428446Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715737, task: 1. pass away 2025-10-24T16:20:37.428459Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4067: SelfId: [4:7564823985777784677:2777], TxId: 281474976715737, task: 2. Add data: 234 / 234 2025-10-24T16:20:37.428469Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4036: SelfId: [4:7564823985777784677:2777], TxId: 281474976715737, task: 2. Send data=234, closed=1, bufferActorId=[4:7564823985777784666:2706] 2025-10-24T16:20:37.428472Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715737;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:37.428473Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:403: SelfId: [4:7564823985777784672:2777], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9ca8c2vaqe8bafmzhm8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 234 2025-10-24T16:20:37.428478Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715737, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:37.428480Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 2. Tasks execution finished 2025-10-24T16:20:37.428482Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1588: SelfId: [4:7564823985777784672:2777], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9ca8c2vaqe8bafmzhm8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2025-10-24T16:20:37.428539Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2307: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:5:1]). lockId=281474976715731. ActorId=[4:7564823985777784678:2706] 2025-10-24T16:20:37.428547Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:447: Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Open: token=0 2025-10-24T16:20:37.428557Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2497: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], ProcessRequestQueue [OwnerId: 72057594046644480, LocalPathId: 5] NOT READY queue=1 2025-10-24T16:20:37.428575Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:456: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Write: token=0 2025-10-24T16:20:37.428599Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:466: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Close: token=0 2025-10-24T16:20:37.428613Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3931: SelfId: [4:7564823985777784677:2777], TxId: 281474976715737, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7564823985777784666:2706] 2025-10-24T16:20:37.428617Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3949: SelfId: [4:7564823985777784677:2777], TxId: 281474976715737, task: 2. Finished 2025-10-24T16:20:37.428620Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823985777784672:2777], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9ca8c2vaqe8bafmzhm8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646922 2025-10-24T16:20:37.428626Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715737, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:37.428634Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 2. Tasks execution finished 2025-10-24T16:20:37.428637Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823985777784672:2777], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9ca8c2vaqe8bafmzhm8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTNiMzM3NTctOWJjZmQ3NzItNzE0MmYxOS02NDViZjg2NQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:37.428650Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715737, task: 2. pass away 2025-10-24T16:20:37.428665Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715737;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:37.428751Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2641: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Start prepare for distributed commit 2025-10-24T16:20:37.428765Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1018: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]SetPrepare; txId=281474976715737 2025-10-24T16:20:37.428768Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2564: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Flush data 2025-10-24T16:20:37.428801Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1183: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Send EvWrite to ShardID=72075186224037896, isPrepare=1, isImmediateCommit=0, TxId=281474976715737, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715731 DataShard: 72075186224037896 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 5, Size=324, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1, BufferMemory=324 2025-10-24T16:20:37.428828Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2762: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Send EvWrite (external) to ShardID=72075186224037889, isPrepare=1, isImmediateCommit=0, TxId=281474976715737, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715731 DataShard: 72075186224037889 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 7, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2025-10-24T16:20:37.429107Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:708: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Recv EvWriteResult from ShardID=72075186224037896, Status=STATUS_PREPARED, TxId=281474976715737, Locks= , Cookie=1 2025-10-24T16:20:37.429134Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2564: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Flush data 2025-10-24T16:20:37.429165Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3269: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Recv EvWriteResult (external) from ShardID=72075186224037889, Status=STATUS_PREPARED, TxId=281474976715737, Locks= , Cookie=0 2025-10-24T16:20:37.429173Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3566: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Got prepared result TxId=281474976715737, TabletId=72075186224037889, Cookie=0 2025-10-24T16:20:37.429178Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2687: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Start distributed commit with TxId=281474976715737 2025-10-24T16:20:37.429181Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1026: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]SetDistributedCommit; txId=281474976715737 2025-10-24T16:20:37.429186Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2861: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2025-10-24T16:20:37.429281Z node 4 :KQP_COMPUTE DEBUG: 
kqp_write_actor.cpp:2920: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Got transaction status, status: 16 2025-10-24T16:20:37.429339Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2920: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Got transaction status, status: 17 2025-10-24T16:20:37.431220Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3301: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Recv EvWriteResult (external) from ShardID=72075186224037889, Status=STATUS_COMPLETED, TxId=281474976715737, Locks= , Cookie=0 2025-10-24T16:20:37.431236Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3595: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Got completed result TxId=281474976715737, TabletId=72075186224037889, Cookie=0, Locks= 2025-10-24T16:20:37.431264Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:708: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Recv EvWriteResult from ShardID=72075186224037896, Status=STATUS_COMPLETED, TxId=281474976715737, Locks= , Cookie=0 2025-10-24T16:20:37.431269Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:978: SelfId: [4:7564823985777784678:2706], Table: `Root/yq/connections` ([72057594046644480:5:1]), SessionActorId: [4:7564823981482816990:2706]Got completed result TxId=281474976715737, TabletId=72075186224037896, Cookie=0, Mode=2, Locks= 2025-10-24T16:20:37.431271Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3628: SelfId: [4:7564823985777784666:2706], SessionActorId: [4:7564823981482816990:2706], Committed TxId=281474976715737 >> KqpService::CloseSessionsWithLoad |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: E1024 16:20:29.827286888 8759 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:29.827344341 8759 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:29.829103Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.831558Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:29570 2025-10-24T16:20:29.840872Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.844557Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.845787Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.845915Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846138Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846150Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846353Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846363Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846377Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846432Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846441Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846466Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.846475Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.847512Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.852107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:29.852149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:20:29.860895Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.864911Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.864938Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.865012Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.865016Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.865033Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.865123Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.865162Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.866089Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.866112Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.866112Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.866119Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.867114Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:29570: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:29570 } ] 2025-10-24T16:20:29.867129Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last ... 6715708, task: 1, CA Id [1:7564823985116197559:2700]. Add point to new shardId: 72075186224037889 2025-10-24T16:20:37.409393Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:731: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. Pending shards States: TShardState{ TabletId: 72075186224037889, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://folder_id_WTF, String : utqudu2o9133o0efc8di)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://folder_id_WTF, String : utqudu2o9133o0efc8di)], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-10-24T16:20:37.409395Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:37.409397Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:471: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. BEFORE: 1.0 2025-10-24T16:20:37.409408Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:902: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. Send EvRead to shardId: 72075186224037889, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-10-24T16:20:37.409416Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:485: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. AFTER: 0.1 2025-10-24T16:20:37.409418Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-10-24T16:20:37.409735Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. Recv TEvReadResult from ShardID=72075186224037889, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:37.409741Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. Taken 0 locks 2025-10-24T16:20:37.409744Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:37.409748Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-10-24T16:20:37.409754Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.409758Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:37.409761Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. enter pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:37.409770Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. exit pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 1 freeSpace: 8388572 2025-10-24T16:20:37.409774Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. returned 1 rows; processed 1 rows 2025-10-24T16:20:37.409783Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. dropping batch for read #0 2025-10-24T16:20:37.409785Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:37.409787Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:37.409790Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715708, task: 1, CA Id [1:7564823985116197559:2700]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:37.409840Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:37.409843Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.409851Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715708, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-10-24T16:20:37.409854Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197560:2701], TxId: 281474976715708, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646923 2025-10-24T16:20:37.409862Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715708, task: 2. 
Finish input channelId: 1, from: [1:7564823985116197559:2700] 2025-10-24T16:20:37.409870Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197560:2701], TxId: 281474976715708, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:37.409896Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [1:7564823985116197560:2701], TxId: 281474976715708, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:37.409899Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:37.409903Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.409907Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715708, task: 1. Tasks execution finished 2025-10-24T16:20:37.409909Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [1:7564823985116197559:2700], TxId: 281474976715708, task: 1. Ctx: { TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:37.409932Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715708, task: 1. pass away 2025-10-24T16:20:37.409953Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715708;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:37.409996Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [1:7564823985116197560:2701], TxId: 281474976715708, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-10-24T16:20:37.410001Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715708, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:37.410003Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715708, task: 2. Tasks execution finished 2025-10-24T16:20:37.410004Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [1:7564823985116197560:2701], TxId: 281474976715708, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9cz0yxje1zn7h1fkb61. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MzE1MGFhNzQtOTdmZjdmZjctYTc3ZGQ2ODEtYmFhNDA1ODY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:37.410010Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715708, task: 2. pass away 2025-10-24T16:20:37.410016Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715708;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:37.436503Z node 1 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:5638: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:5638 [good] Yq_1::CreateQuery_Without_Connection ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-10-24T16:20:29.771326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823950612164831:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:29.771356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1024 16:20:29.815301479 8641 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:29.815339523 8641 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:29.815615Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.815636Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.815644Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.816799Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.816814Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.816820Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.817092Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.817460Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.817669Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24724 2025-10-24T16:20:29.817689Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.817695Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.819869Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.819888Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.820131Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.822414Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.826342Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.828779Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:29.840998Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.841024Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.841033Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.844148Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.844154Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.844477Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.844742Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.846565Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.846700Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.846721Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.847384Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.847396Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] 2025-10-24T16:20:29.847432Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24724: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24724 } ] ... 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:644: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Processing resolved ShardId# 72075186224037897, partition range: [(String : yandexcloud://some_folder_id, String : utqudu2o8vnojvmmaefa) ; ()), i: 0, state ranges: 0, points: 1 2025-10-24T16:20:37.809829Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:682: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Add point to new shardId: 72075186224037897 2025-10-24T16:20:37.809841Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:731: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Pending shards States: TShardState{ TabletId: 72075186224037897, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudu2o8vnojvmmaefa)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudu2o8vnojvmmaefa)], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-10-24T16:20:37.809846Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:37.809849Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:471: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. BEFORE: 1.0 2025-10-24T16:20:37.809861Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:902: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Send EvRead to shardId: 72075186224037897, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-10-24T16:20:37.809878Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:485: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. AFTER: 0.1 2025-10-24T16:20:37.809885Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-10-24T16:20:37.810123Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:37.810132Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Taken 0 locks 2025-10-24T16:20:37.810135Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:37.810140Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-10-24T16:20:37.810145Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.810150Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:37.810153Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:37.810163Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8388572 2025-10-24T16:20:37.810171Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. returned 1 rows; processed 1 rows 2025-10-24T16:20:37.810190Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. dropping batch for read #0 2025-10-24T16:20:37.810197Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:37.810200Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:37.810204Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715712, task: 1, CA Id [7:7564823986343276907:2705]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:37.810259Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:37.810265Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.810268Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276908:2706], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-10-24T16:20:37.810272Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715712, task: 1. 
Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-10-24T16:20:37.810283Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715712, task: 2. Finish input channelId: 1, from: [7:7564823986343276907:2705] 2025-10-24T16:20:37.810304Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:37.810305Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276908:2706], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.810312Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.810314Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715712, task: 1. Tasks execution finished 2025-10-24T16:20:37.810324Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [7:7564823986343276907:2705], TxId: 281474976715712, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:37.810353Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715712, task: 1. pass away 2025-10-24T16:20:37.810356Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [7:7564823986343276908:2706], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:37.810385Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715712;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:37.810419Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [7:7564823986343276908:2706], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:37.810435Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715712, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:37.810442Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715712, task: 2. Tasks execution finished 2025-10-24T16:20:37.810444Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [7:7564823986343276908:2706], TxId: 281474976715712, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgb9sge96r83mdt1c2hb4b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=ZTc5NzYzNjMtMmE4NjVlNzctMWE3NGJlYWMtNGJhM2ZlYjg=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:37.810458Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715712, task: 2. pass away 2025-10-24T16:20:37.810478Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715712;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink >> IndexBuildTestReboots::CancelBuild [GOOD] >> IndexBuildTestReboots::CancelBuildUniq >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest |64.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |64.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> Yq_1::ModifyQuery [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |64.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest |64.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Table >> TxUsage::WriteToTopic_Demo_44_Table >> TxUsage::Write_And_Read_Big_Messages_1 |64.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-10-24T16:20:29.771713Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823951372297063:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:29.771900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1024 16:20:29.805732631 8647 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:20:29.805782665 8647 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:20:29.811610Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2722 2025-10-24T16:20:29.817277Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.826487Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.843233Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.843410Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.843987Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844011Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844018Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844092Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844472Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844492Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844498Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844508Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844537Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844609Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.844746Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.845474Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.849625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:29.856014Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.856786Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.856996Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.857673Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.857880Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.857996Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.858743Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.859959Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.860147Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.860572Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.860693Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.860706Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:2722: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2722 } ] 2025-10-24T16:20:29.860714Z node 1 :YQ_CONTROL_PLANE_STORAGE ... TUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7564823992914807265:2802] 2025-10-24T16:20:39.531019Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-10-24T16:20:39.531038Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1074: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823992914807265 RawX2: 4503616807242482 } } DstEndpoint { ActorId { RawX1: 7564823992914807266 RawX2: 4503616807242483 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7564823992914807266 RawX2: 4503616807242483 } } DstEndpoint { ActorId { RawX1: 7564823992914807261 RawX2: 4503616807242431 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-10-24T16:20:39.531046Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:39.531265Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:977: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-10-24T16:20:39.531274Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1069: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. Taken 0 locks 2025-10-24T16:20:39.531276Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1084: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. new data for read #0 seqno = 1 finished = 1 2025-10-24T16:20:39.531281Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-10-24T16:20:39.531286Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . 
TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:39.531289Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1351: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-10-24T16:20:39.531291Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1247: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-10-24T16:20:39.531300Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1328: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8387493 2025-10-24T16:20:39.531304Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1385: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. returned 1 rows; processed 1 rows 2025-10-24T16:20:39.531315Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1422: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. dropping batch for read #0 2025-10-24T16:20:39.531321Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:467: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. effective maxinflight 1024 sorted 0 2025-10-24T16:20:39.531322Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:489: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-10-24T16:20:39.531325Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1447: TxId: 281474976715728, task: 1, CA Id [4:7564823992914807265:2802]. returned async data processed rows 1 left freeSpace 8387493 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-10-24T16:20:39.531405Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:39.531412Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:39.531422Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715728, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-10-24T16:20:39.531432Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-10-24T16:20:39.531441Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715728, task: 2. Finish input channelId: 1, from: [4:7564823992914807265:2802] 2025-10-24T16:20:39.531452Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:39.531478Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:502: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-10-24T16:20:39.531485Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-10-24T16:20:39.531489Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:39.531493Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715728, task: 1. Tasks execution finished 2025-10-24T16:20:39.531495Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823992914807265:2802], TxId: 281474976715728, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:39.531523Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715728, task: 1. pass away 2025-10-24T16:20:39.531556Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715728;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-10-24T16:20:39.531644Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-10-24T16:20:39.531654Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715728, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-10-24T16:20:39.531655Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715728, task: 2. Tasks execution finished 2025-10-24T16:20:39.531657Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:510: SelfId: [4:7564823992914807266:2803], TxId: 281474976715728, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgbbe85evqwp7dysgctx0p. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ZjJjNDM2ZWQtMmRhNjM2MTUtOGMyNWM3LWIyZjViZjA5. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-10-24T16:20:39.531662Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715728, task: 2. pass away 2025-10-24T16:20:39.531667Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715728;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Query >> TxUsage::Transactions_Conflict_On_SeqNo_Table |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/show_create_view |63.8%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/exec/yql-dq-comp_nodes-hash_join_utils-simd-exec.pkg.fake >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] |63.7%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Write_And_Read_Big_Messages_2 >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> TxUsage::Write_And_Read_Huge_Messages_2 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26994, MsgBus: 28391 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009cf/r3tmp/tmpndb46W/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26994, node 1 TClient is connected to server localhost:28391 TClient is connected to server localhost:28391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27490, MsgBus: 7404 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000986/r3tmp/tmpNl465D/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27490, node 1 TClient is connected to server localhost:7404 TClient is connected to server localhost:7404 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |63.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/node_broker >> TxUsage::Write_And_Read_Huge_Messages_1 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 17367, MsgBus: 22043 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009ab/r3tmp/tmp5EVsCT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17367, node 1 TClient is connected to server localhost:22043 TClient is connected to server localhost:22043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 28071, MsgBus: 9588 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009d6/r3tmp/tmptNbChe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28071, node 1 TClient is connected to server localhost:9588 TClient is connected to server localhost:9588 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/exec/pack_tuple/pack_tuple |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |63.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |63.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> ydb-tests-functional-tpcc::import_test [GOOD] |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/exec/tuples_to_bucket/tuples_to_bucket |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column >> KqpQueryService::CloseSessionsWithLoad [GOOD] |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/exec/stream_store/stream_store |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpcc/import_test >> ydb-tests-functional-tpcc::import_test [GOOD] |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig >> ydb-tests-tools-kqprun-tests::import_test [GOOD] >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Query |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |62.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 24705, MsgBus: 20862 2025-10-24T16:20:36.387641Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823978895276620:2074];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:36.387663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d85/r3tmp/tmpqEeLDO/pdisk_1.dat 2025-10-24T16:20:36.427129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:36.429842Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24705, node 1 2025-10-24T16:20:36.441594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:36.441606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:36.441607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:36.441646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20862 TClient is connected to server localhost:20862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:20:36.490414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:36.490441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:36.491464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:36.498682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:36.502359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:20:36.519319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:20:36.535111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:20:36.548258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:20:36.587749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:36.768872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823978895278224:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.768903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.769003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823978895278234:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.769021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.809777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.816616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.827429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.841514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.854850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.861864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.869393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.883976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:36.899472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564823978895279096:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.899492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.899549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823978895279101:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.899576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823978895279102:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.899595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:36.900240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... ph.cpp:266: Create result channelId: 3 from task: 3 with index: 0 2025-10-24T16:20:40.720624Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:266: Create result channelId: 4 from task: 4 with index: 0 2025-10-24T16:20:40.720627Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:266: Create result channelId: 5 from task: 5 with index: 0 2025-10-24T16:20:40.720629Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:266: Create result channelId: 6 from task: 6 with index: 0 2025-10-24T16:20:40.723798Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:273: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: }. Execution is complete, results: 6 2025-10-24T16:20:40.727685Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:394: [[1:7564823996075150852:3245]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGhAvUm9vdC9FaWdodFNoYXJkIgAqDAiAgpSEgICAgAEQAzABOAFKMwoERGF0YRADGgVJbnQzMiABMABCAEgAUgBYAGIWCP///////////wEQ////////////AUozCgNLZXkQARoGVWludDY0IAQwAEIASABSAFgAYhYI////////////ARD///////////8BSjUKBFRleHQQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1761322840","query_text":"\\n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-774291387 + 0;\\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-774291387 + 1;\\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-774291387 + 2;\\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-774291387 + 3;\\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-774291387 + 4;\\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-774291387 + 5;\\n\\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\\n (1169482429ul, \\\"New\\\");\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":21,\"Plans\":[{\"PlanNodeId\":20,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{Key: 1169482429,Text: \\\"New\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"},{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node 
Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":10,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":7,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":4,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":19,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"9d9bb70b-10f38bab-95efe3b0-54a02207","version":"1.0"} 2025-10-24T16:20:40.728019Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:561: Compilation successful, self: [1:7564823996075150852:3245], duration: 0.149876s 2025-10-24T16:20:40.728033Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:406: Send response, self: [1:7564823996075150852:3245], 
owner: [1:7564823978895278195:2381], status: SUCCESS, issues: , uid: 9d9bb70b-10f38bab-95efe3b0-54a02207 2025-10-24T16:20:40.728070Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7564823983190246893:2665], status: SUCCESS, compileActor: [1:7564823996075150852:3245] 2025-10-24T16:20:40.728112Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-774291387 + 0;\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-774291387 + 1;\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-774291387 + 2;\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-774291387 + 3;\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-774291387 + 4;\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-774291387 + 5;\n\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\n (1169482429ul, \"New\");\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-10-24T16:20:40.728191Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7564823983190246893:2665], queryUid: 9d9bb70b-10f38bab-95efe3b0-54a02207, status:SUCCESS 2025-10-24T16:20:41.388144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564823978895276620:2074];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:41.388189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; still compiling... 0 still active sessions ... 0 |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session >> ydb-tests-functional-minidumps::import_test [GOOD] >> ydb-tests-olap-data_quotas::import_test [GOOD] >> SystemView::Nodes |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] |62.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::ShowCreateTablePartitionAtKeys >> AuthSystemView::AuthUsers_LockUnlock |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl >> AuthSystemView::AuthGroups >> AuthSystemView::AuthUsers >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> SystemView::VSlotsFields >> IndexBuildTestReboots::CancelBuildUniq [GOOD] >> SystemView::Nodes [GOOD] >> SystemView::PartitionStatsAfterRemoveColumnTable |61.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |61.8%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::CancelBuildUniq [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:34.802744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:34.802761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:34.802767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:34.802773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:34.802779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:34.802784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:34.802792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 
10 2025-10-24T16:20:34.802805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:34.802919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:34.802962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:34.818898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:34.818922Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:34.819015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:34.821254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:34.821315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:34.821345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:34.823131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:34.823239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:34.823325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.823377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:34.824044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:34.824073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:34.824275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:34.824288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:34.824316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:34.824324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:34.824330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:34.824352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:34.825454Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:34.840670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:34.840733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.840771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:34.840776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:34.840829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:34.840854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:34.841347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.841385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:34.841423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.841430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:34.841436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:34.841441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:34.841799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.841808Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:34.841814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:34.842108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.842117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.842121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:34.842126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:34.842523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:34.842786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:34.842811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:34.842939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.842954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
6:20:45.103450Z node 40 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-10-24T16:20:45.103463Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-10-24T16:20:45.103468Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [40:878:2819] TestWaitNotification: OK eventTxId 1004 TestWaitNotification wait txId: 1003 2025-10-24T16:20:45.103512Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-10-24T16:20:45.103516Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-10-24T16:20:45.103553Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:103: NotifyTxCompletion index build in-flight, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:20:45.103559Z node 40 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:110: NotifyTxCompletion, index build is ready to notify, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:20:45.103570Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:20:45.103576Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:881:2822] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:20:45.103634Z node 40 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-10-24T16:20:45.103663Z node 40 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:103: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 1003 State: STATE_DONE Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 1003 State: STATE_DONE Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2025-10-24T16:20:45.103714Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:20:45.103745Z node 40 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/Table" took 34us result status StatusSuccess 2025-10-24T16:20:45.103878Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table" PathDescription { Self { Name: "Table" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty 
Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:45.103963Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:20:45.103986Z node 40 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1" took 25us result status StatusSuccess 2025-10-24T16:20:45.104088Z node 40 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:45.104132Z node 40 :BUILD_INDEX NOTICE: schemeshard_build_index__forget.cpp:18: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 1005 DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-10-24T16:20:45.104157Z node 40 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:101: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 1005 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 1005 Status: SUCCESS |61.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli >> kqprun_recipe::import_test [GOOD] |61.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl >> AuthSystemView::AuthGroups [GOOD] >> AuthSystemView::AuthUsers [GOOD] >> AuthSystemView::AuthGroups_Access >> AuthSystemView::AuthUsers_Access >> SystemView::ShowCreateTableDefaultLiteral >> ShowCreateView::FromTable >> SystemView::PartitionStatsAfterRemoveColumnTable [GOOD] >> SystemView::ConcurrentScans |61.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |61.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |61.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> SystemView::SysViewScanBackPressure >> KqpService::CloseSessionsWithLoad [GOOD] |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms >> SystemView::ShowCreateTablePartitionAtKeys [GOOD] >> SystemView::ShowCreateTablePartitionByHash >> AuthSystemView::AuthGroups_Access [GOOD] >> AuthSystemView::AuthGroups_ResultOrder >> SystemView::ConcurrentScans [GOOD] >> SystemView::PDisksFields >> AuthSystemView::AuthUsers_Access [GOOD] >> AuthSystemView::AuthOwners-EnableRealSystemViewPaths >> TxUsage::Write_And_Read_Big_Messages_1 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 25451, MsgBus: 21965 2025-10-24T16:20:39.094085Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823993685889766:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:39.094109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000bd9/r3tmp/tmpZ8wLnJ/pdisk_1.dat 2025-10-24T16:20:39.125062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:39.137284Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25451, node 1 2025-10-24T16:20:39.148342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:39.148353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:39.148355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:39.148399Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21965 TClient is connected to server localhost:21965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:39.197277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:39.197309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:39.198502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:39.203160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:39.213592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:20:39.233300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:20:39.251697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:20:39.265136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:20:39.329174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:39.416077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823993685891293:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.416106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.416191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823993685891303:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.416213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.462700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.470066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.480374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.495254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.508624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.522866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.537116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.550824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:39.567787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564823993685892165:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.567821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.567823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823993685892170:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.567925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564823993685892172:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.567943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:39.568723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... t_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":16}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":18}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":11,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":11}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":13}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":7,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node 
Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":7,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":11,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node 
Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":21,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":22,\"Plans\":[{\"PlanNodeId\":23,\"Plans\":[{\"PlanNodeId\":25,\"Plans\":[{\"PlanNodeId\":26,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":27,\"Plans\":[{\"PlanNodeId\":28,\"Plans\":[{\"PlanNodeId\":30,\"Plans\":[{\"PlanNodeId\":31,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"4cd28bc6-d6433716-38361025-a65e5236","version":"1.0"} 2025-10-24T16:20:41.398441Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:561: Compilation successful, self: [1:7564824002275827972:2947], duration: 0.299125s 2025-10-24T16:20:41.398457Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:406: Send response, self: [1:7564824002275827972:2947], owner: [1:7564823993685891264:2381], status: SUCCESS, issues: , uid: 4cd28bc6-d6433716-38361025-a65e5236 2025-10-24T16:20:41.398519Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7564823993685892566:2565], status: SUCCESS, compileActor: [1:7564824002275827972:2947] 2025-10-24T16:20:41.398551Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7564823993685892566:2565], queryUid: 4cd28bc6-d6433716-38361025-a65e5236, status:SUCCESS still compiling... 0 still active sessions ... 
0 2025-10-24T16:20:44.094423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564823993685889766:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:44.094474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; received non-success status for session 12 received non-success status for session 23 received non-success status for session 8 received non-success status for session 14 received non-success status for session 1 received non-success status for session 3 received non-success status for session 10 received non-success status for session 4 received non-success status for session 5 received non-success status for session 24 received non-success status for session 15 received non-success status for session 20 received non-success status for session 2 received non-success status for session 7 received non-success status for session 36 received non-success status for session 48 received non-success status for session 32 received non-success status for session 26 received non-success status for session 25 received non-success status for session 18 received non-success status for session 19 received non-success status for session 38 received non-success status for session 35 received non-success status for session 41 received non-success status for session 42 received non-success status for session 37 received non-success status for session 28 received non-success status for session 40 received non-success status for session 45 received non-success status for session received non-success status for session 34 6 received non-success status for session 30 received non-success status for session 47 received non-success status for session 27 received non-success status for session 44 received non-success status for session 11 received non-success status for session 39 received non-success status for session 21 received non-success status for session 31 received non-success status for session 0 received non-success status for session 17 received non-success status for session 29 received non-success status for session 33 received non-success status for session 13 received non-success status for session 16 received non-success status for session 22 received non-success status for session 46 received non-success status for session 49 received non-success status for session 43 |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload >> TxUsage::Write_50k_100times_50tx |60.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/ydb-tests-sql >> TxUsage::Write_And_Read_Huge_Messages_1 [GOOD] >> ydb-tests-olap-scenario::import_test [GOOD] >> ShowCreateView::FromTable [GOOD] >> TxUsage::Write_And_Read_Gigant_Messages_2 >> ShowCreateView::WithTablePathPrefix |60.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |60.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> AuthSystemView::AuthGroups_ResultOrder [GOOD] >> AuthSystemView::AuthGroups_TableRange >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsByCpuTables >> AuthSystemView::AuthOwners-EnableRealSystemViewPaths [GOOD] >> AuthSystemView::AuthOwners_Access |60.4%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical >> AuthSystemView::AuthUsers_LockUnlock [GOOD] >> AuthSystemView::AuthUsers_ResultOrder >> TxUsage::Write_And_Read_Big_Messages_2 [GOOD] >> SystemView::PartitionStatsOneSchemeShard >> TxUsage::Write_And_Read_Huge_Messages_2 [GOOD] >> TxUsage::Write_And_Read_Gigant_Messages_1 >> SystemView::ShowCreateTableDefaultLiteral [GOOD] >> SystemView::ShowCreateTableColumn >> AuthSystemView::AuthGroups_TableRange [GOOD] >> AuthSystemView::AuthGroupMembers |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_And_Read_Huge_Messages_2 [GOOD] Test command err: 2025-10-24T16:20:42.100135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824007793444321:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:42.100309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:42.103677Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0013bf/r3tmp/tmp9b7Uog/pdisk_1.dat 2025-10-24T16:20:42.133370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:42.133605Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824007793444290:2081] 1761322842099946 != 1761322842099949 2025-10-24T16:20:42.136056Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18062, node 1 2025-10-24T16:20:42.140209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/0013bf/r3tmp/yandexS5DjKC.tmp 2025-10-24T16:20:42.140218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/0013bf/r3tmp/yandexS5DjKC.tmp 2025-10-24T16:20:42.140273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/0013bf/r3tmp/yandexS5DjKC.tmp 2025-10-24T16:20:42.140308Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:42.143060Z INFO: TTestServer started on Port 4454 GrpcPort 18062 TClient is connected to server localhost:4454 PQClient connected to localhost:18062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:42.203293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:42.203319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:42.204330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:42.206812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-10-24T16:20:42.217245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 2025-10-24T16:20:42.328289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:42.396224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007793445110:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.396360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.396446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007793445124:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.396460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.396484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007793445128:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:42.397374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007793445159:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007793445161:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.399334Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824007793445130:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:42.429622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.436257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.455169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.457640Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824007793445375:2552] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7564824007793445480:2622] 2025-10-24T16:20:43.101650Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:47.100393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824007793444321:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:47.100442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:47.676614Z :Write_And_Read_Huge_Messages_2 INFO: TTopicSdkTestSetup started 2025-10-24T16:20:47.680358Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:47.684822Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824029268282207:2727] connected; active server actors: 1 2025-10-24T16:20:47.684875Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:47.684966Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:47.685002Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 7 ... DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.210212Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.210214Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.305941Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.305954Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.305957Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.310548Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.310560Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.310562Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.406285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.406293Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.406295Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.410893Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.410905Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.410907Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.506645Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.506655Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.506657Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.511239Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.511248Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after 
ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.511250Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.606993Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.607004Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.607005Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.611585Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.611594Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.611595Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.707346Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.707357Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.707359Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.711966Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.711973Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.711975Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.807692Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.807703Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.807705Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.812286Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.812295Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.812296Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.908053Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.908063Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.908064Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:51.912651Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:51.912661Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:51.912663Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:52.008424Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:52.008440Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:52.008444Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:52.012973Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:52.012985Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:52.012987Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:20:52.080771Z :INFO: [/Root] [/Root] [833a9098-8f569030-eff4cfb5-fc47ed16] Closing read session. Close timeout: 0.000000s 2025-10-24T16:20:52.080783Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:3:4 2025-10-24T16:20:52.080789Z :INFO: [/Root] [/Root] [833a9098-8f569030-eff4cfb5-fc47ed16] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2002 BytesRead: 36000000 MessagesRead: 4 BytesReadCompressed: 36000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:20:52.080805Z :NOTICE: [/Root] [/Root] [833a9098-8f569030-eff4cfb5-fc47ed16] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-10-24T16:20:52.080811Z :DEBUG: [/Root] [/Root] [833a9098-8f569030-eff4cfb5-fc47ed16] [] Abort session to cluster 2025-10-24T16:20:52.081061Z :NOTICE: [/Root] [/Root] [833a9098-8f569030-eff4cfb5-fc47ed16] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:20:52.081120Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_1_1_2817970722510238696_v1 grpc read done: success# 0, data# { } 2025-10-24T16:20:52.081132Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_1_1_2817970722510238696_v1 grpc read failed 2025-10-24T16:20:52.081138Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_1_1_2817970722510238696_v1 grpc closed 2025-10-24T16:20:52.081154Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_1_1_2817970722510238696_v1 is DEAD 2025-10-24T16:20:52.081225Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037894] Destroy direct read session test-consumer_1_1_2817970722510238696_v1 2025-10-24T16:20:52.081236Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [1:7564824042153184556:2534] destroyed 2025-10-24T16:20:52.081249Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_1_1_2817970722510238696_v1 2025-10-24T16:20:52.081467Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037895][topic_A] pipe [1:7564824042153184553:2531] disconnected. 2025-10-24T16:20:52.081476Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037895][topic_A] pipe [1:7564824042153184553:2531] disconnected; active server actors: 1 2025-10-24T16:20:52.081478Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037895][topic_A] pipe [1:7564824042153184553:2531] client test-consumer disconnected session test-consumer_1_1_2817970722510238696_v1 2025-10-24T16:20:52.108732Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:52.108742Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:52.108743Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:20:52.113348Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:52.113360Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:52.113361Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist >> AuthSystemView::AuthOwners_Access [GOOD] >> AuthSystemView::AuthOwners_ResultOrder >> SystemView::ShowCreateTablePartitionByHash [GOOD] >> SystemView::ShowCreateTablePartitionSettings >> ShowCreateView::WithTablePathPrefix [GOOD] |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> ShowCreateView::WithSingleQuotedTablePathPrefix >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading >> AuthSystemView::AuthUsers_ResultOrder [GOOD] >> AuthSystemView::AuthUsers_TableRange |59.9%| [LD] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets >> AuthSystemView::AuthGroupMembers [GOOD] >> SystemView::PDisksFields [GOOD] >> AuthSystemView::AuthGroupMembers_Access >> SystemView::GroupsFields |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive >> AuthSystemView::AuthOwners_ResultOrder [GOOD] >> AuthSystemView::AuthOwners_TableRange+EnableRealSystemViewPaths >> SystemView::SysViewScanBackPressure [GOOD] >> SystemView::StoragePoolsRanges |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds >> TxUsage::Transactions_Conflict_On_SeqNo_Table [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery >> TxUsage::Transactions_Conflict_On_SeqNo_Query >> AuthSystemView::AuthUsers_TableRange [GOOD] >> DbCounters::TabletsSimple |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 >> AuthSystemView::AuthGroupMembers_Access [GOOD] >> AuthSystemView::AuthGroupMembers_ResultOrder >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Query [GOOD] >> ShowCreateView::WithSingleQuotedTablePathPrefix [GOOD] >> ShowCreateView::WithPairedTablePathPrefix |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize >> AuthSystemView::AuthOwners_TableRange+EnableRealSystemViewPaths [GOOD] >> AuthSystemView::AuthOwners_TableRange-EnableRealSystemViewPaths >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff >> SystemView::QueryStats >> SystemView::ShowCreateTablePartitionSettings [GOOD] >> SystemView::ShowCreateTableReadReplicas >> ydb-tests-functional-limits::import_test [GOOD] >> SystemView::ShowCreateTableColumn [GOOD] >> SystemView::ShowCreateTable >> ydb-tests-functional-serializable::import_test [GOOD] >> TxUsage::Write_50k_100times_50tx [GOOD] |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode >> AuthSystemView::AuthGroupMembers_ResultOrder [GOOD] >> AuthSystemView::AuthGroupMembers_TableRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions_Query [GOOD] Test command err: 2025-10-24T16:20:43.265529Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824009492307072:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:43.265576Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:43.267947Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001350/r3tmp/tmpSoFxIN/pdisk_1.dat 2025-10-24T16:20:43.295528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:43.303707Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:43.303869Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824009492306969:2081] 1761322843264949 != 1761322843264952 TServer::EnableGrpc on GrpcPort 12671, node 1 2025-10-24T16:20:43.309869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001350/r3tmp/yandex6Q6WRs.tmp 2025-10-24T16:20:43.309879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001350/r3tmp/yandex6Q6WRs.tmp 2025-10-24T16:20:43.309935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001350/r3tmp/yandex6Q6WRs.tmp 2025-10-24T16:20:43.309978Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:43.312713Z INFO: TTestServer started on Port 17006 GrpcPort 12671 TClient is connected to server localhost:17006 PQClient connected to localhost:12671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:20:43.371406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:43.371434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:43.372126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:43.372392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-10-24T16:20:43.378820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 2025-10-24T16:20:43.549698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:43.561971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824009492307777:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824009492307803:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824009492307808:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:43.562933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824009492307841:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824009492307843:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.562984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:43.564095Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824009492307807:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:43.607395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:43.613381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:43.629751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-10-24T16:20:43.656711Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824009492308119:2594] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7564824009492308159:2622] 2025-10-24T16:20:44.267183Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:48.265902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824009492307072:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:48.265939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:48.861275Z :Write_Random_Sized_Messages_In_Wide_Transactions_Query INFO: TTopicSdkTestSetup started 2025-10-24T16:20:48.864121Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:48.867574Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824030967144881:2725] connected; active server actors: 1 2025-10-24T16:20:48.867670Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:48.867761Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-10-24T16:20:48.867793Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] s ... eProxy. Passthrough write request to KV 2025-10-24T16:20:57.493525Z node 1 :PERSQUEUE DEBUG: read.h:350: [72075186224037899][PQCacheProxy]CacheProxy. Delete blobs from d0000000013_00000000000000000006_00000_0000000001_00012?(+) to d0000000013_00000000000000000006_00000_0000000001_00012?(+) 2025-10-24T16:20:57.493526Z node 1 :PERSQUEUE DEBUG: read.h:350: [72075186224037899][PQCacheProxy]CacheProxy. Delete blobs from d0000000013_00000000000000000007_00016_0000000001_00005?(+) to d0000000013_00000000000000000007_00016_0000000001_00005?(+) 2025-10-24T16:20:57.493527Z node 1 :PERSQUEUE DEBUG: read.h:350: [72075186224037899][PQCacheProxy]CacheProxy. Delete blobs from d0000000013_00000000000000000009_00016_0000000001_00006?(+) to d0000000013_00000000000000000009_00016_0000000001_00006?(+) 2025-10-24T16:20:57.493528Z node 1 :PERSQUEUE DEBUG: read.h:350: [72075186224037899][PQCacheProxy]CacheProxy. Delete blobs from d0000000013_00000000000000000005_00016_0000000001_00004|(+) to d0000000013_00000000000000000005_00016_0000000001_00004|(+) 2025-10-24T16:20:57.493651Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037912][Partition][15][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:57.493659Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037912][Partition][15][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:57.493661Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037912][Partition][15][StateIdle] Try persist 2025-10-24T16:20:57.493693Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037895][Partition][8][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:57.493698Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037895][Partition][8][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:57.493699Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][8][StateIdle] Try persist 2025-10-24T16:20:57.494029Z node 1 :PERSQUEUE DEBUG: cache_eviction.h:438: Erasing blob in L1. Partition 14 offset 7 size 1537400 cause it's been evicted from L2. Actual L1 size: 3 2025-10-24T16:20:57.494037Z node 1 :PERSQUEUE DEBUG: cache_eviction.h:438: Erasing blob in L1. Partition 14 offset 6 size 2560604 cause it's been evicted from L2. Actual L1 size: 2 2025-10-24T16:20:57.494039Z node 1 :PERSQUEUE DEBUG: cache_eviction.h:438: Erasing blob in L1. Partition 14 offset 5 size 5121044 cause it's been evicted from L2. Actual L1 size: 1 2025-10-24T16:20:57.494041Z node 1 :PERSQUEUE NOTICE: read.h:372: [72075186224037910][PQCacheProxy]Have to remove new data from cache. Topic topic_A, tablet id72075186224037910, cookie 0 2025-10-24T16:20:57.494055Z node 1 :PERSQUEUE DEBUG: read.h:275: [72075186224037910][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:20:57.494067Z node 1 :PERSQUEUE DEBUG: read.h:313: [72075186224037910][PQCacheProxy]CacheProxy. Passthrough blob. Partition 14 offset 3 partNo 16 count 2 size 8194009 2025-10-24T16:20:57.494075Z node 1 :PERSQUEUE DEBUG: read.h:313: [72075186224037910][PQCacheProxy]CacheProxy. 
Passthrough blob. Partition 14 offset 5 partNo 6 count 2 size 4609462 2025-10-24T16:20:57.494079Z node 1 :PERSQUEUE DEBUG: read.h:313: [72075186224037910][PQCacheProxy]CacheProxy. Passthrough blob. Partition 14 offset 7 partNo 16 count 2 size 5122160 2025-10-24T16:20:57.494082Z node 1 :PERSQUEUE DEBUG: read.h:331: [72075186224037910][PQCacheProxy]CacheProxy. Rename blob from d0000000014_00000000000000000007_00000_0000000000_00016? to d0000000014_00000000000000000007_00000_0000000000_00016 2025-10-24T16:20:57.494084Z node 1 :PERSQUEUE DEBUG: read.h:331: [72075186224037910][PQCacheProxy]CacheProxy. Rename blob from d0000000014_00000000000000000009_00000_0000000001_00016? to d0000000014_00000000000000000009_00000_0000000001_00016 2025-10-24T16:20:57.494088Z node 1 :PERSQUEUE DEBUG: cache_eviction.h:438: Erasing blob in L1. Partition 14 offset 8 size 3584780 cause it's been evicted from L2. Actual L1 size: 0 2025-10-24T16:20:57.494089Z node 1 :PERSQUEUE NOTICE: read.h:372: [72075186224037910][PQCacheProxy]Have to remove new data from cache. Topic topic_A, tablet id72075186224037910, cookie 0 2025-10-24T16:20:57.494107Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037910][Partition][14][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:57.494112Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037910][Partition][14][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:57.494113Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037910][Partition][14][StateIdle] Try persist 2025-10-24T16:20:57.494188Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1045: [PQ: 72075186224037910] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-10-24T16:20:57.494272Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037897][Partition][0][StateIdle] TPartition::DropOwner. 2025-10-24T16:20:57.494280Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:57.494281Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:57.494282Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][0][StateIdle] Try persist 2025-10-24T16:20:57.494287Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037897][Partition][0][StateIdle] TPartition::DropOwner. 2025-10-24T16:20:57.494289Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:20:57.494290Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:20:57.494291Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][0][StateIdle] Try persist 2025-10-24T16:20:57.494304Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:445: [72075186224037897][Partition][0][StateIdle] Continue blobs compaction 2025-10-24T16:20:57.494314Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:478: [72075186224037897][Partition][0][StateIdle] key[0/5] d0000000000_00000000000000000006_00000_0000000001_00009? 
2025-10-24T16:20:57.494316Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:508: [72075186224037897][Partition][0][StateIdle] Need to compact head 0 2025-10-24T16:20:57.494319Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:278: [72075186224037897][Partition][0][StateIdle] requestedBlob.Key=d0000000000_00000000000000000006_00000_0000000001_00009?, parameters.CurOffset=6 2025-10-24T16:20:57.499147Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.0/10 2025-10-24T16:20:57.499162Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 0 2025-10-24T16:20:57.499167Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:0 appended 2025-10-24T16:20:57.499284Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.1/10 2025-10-24T16:20:57.499293Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 1 2025-10-24T16:20:57.499295Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:1 appended 2025-10-24T16:20:57.499421Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.2/10 2025-10-24T16:20:57.499430Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 2 2025-10-24T16:20:57.499432Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:2 appended 2025-10-24T16:20:57.499562Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.3/10 2025-10-24T16:20:57.499570Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 3 2025-10-24T16:20:57.499571Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:3 appended 2025-10-24T16:20:57.499693Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.4/10 2025-10-24T16:20:57.499699Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 4 2025-10-24T16:20:57.499705Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:4 appended 2025-10-24T16:20:57.499823Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.5/10 2025-10-24T16:20:57.499830Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 5 2025-10-24T16:20:57.499832Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:5 appended 
2025-10-24T16:20:57.499973Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.6/10 2025-10-24T16:20:57.499983Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 6 2025-10-24T16:20:57.499985Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:6 appended 2025-10-24T16:20:57.500108Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.7/10 2025-10-24T16:20:57.500116Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 7 2025-10-24T16:20:57.500118Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:7 appended 2025-10-24T16:20:57.500241Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.8/10 2025-10-24T16:20:57.500248Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 8 2025-10-24T16:20:57.500249Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:367: [72075186224037897][Partition][0][StateIdle] Part 6:8 appended 2025-10-24T16:20:57.500254Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:287: [72075186224037897][Partition][0][StateIdle] Try append part 6.9/10 2025-10-24T16:20:57.500256Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:58: [72075186224037897][Partition][0][StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id_5_0' seqNo 1 partNo 9 |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests >> ydb-tests-olap-s3_import::import_test [GOOD] |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a >> SystemView::QueryStats [GOOD] |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/s3_import/import_test >> ydb-tests-olap-s3_import::import_test [GOOD] >> SystemView::QueryStatsFields >> AuthSystemView::AuthOwners_TableRange-EnableRealSystemViewPaths [GOOD] >> AuthSystemView::AuthPermissions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_50k_100times_50tx [GOOD] Test command err: 2025-10-24T16:20:40.917976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823997542455903:2211];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:40.918034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:40.928171Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00139d/r3tmp/tmp3Y3XnP/pdisk_1.dat 2025-10-24T16:20:40.984405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:41.010771Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:41.010983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564823997542455727:2081] 1761322840914762 != 1761322840914765 TServer::EnableGrpc on GrpcPort 18274, node 1 2025-10-24T16:20:41.023569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:41.023644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:41.024804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:41.030941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00139d/r3tmp/yandexSU1ATM.tmp 2025-10-24T16:20:41.030956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00139d/r3tmp/yandexSU1ATM.tmp 2025-10-24T16:20:41.031009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00139d/r3tmp/yandexSU1ATM.tmp 2025-10-24T16:20:41.031052Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:41.044187Z INFO: TTestServer started on Port 10600 GrpcPort 18274 TClient is connected to server localhost:10600 PQClient connected to localhost:18274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:41.106334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-10-24T16:20:41.118667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 2025-10-24T16:20:41.166574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:41.519637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001837423834:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.519645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001837423851:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.519661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.519727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001837423862:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.519738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.520178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001837423890:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.520196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.520227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001837423894:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.520234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.520412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:41.522235Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824001837423861:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:41.552891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.559539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.578802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-10-24T16:20:41.597691Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824001837424168:2591] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7564824001837424213:2622] 2025-10-24T16:20:41.918438Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:45.917649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564823997542455903:2211];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:45.917680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:46.806244Z :Write_And_Read_Big_Messages_1 INFO: TTopicSdkTestSetup started 2025-10-24T16:20:46.808901Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:46.811961Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824023312260939:2727] connected; active server actors: 1 2025-10-24T16:20:46.812034Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:46.812133Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:46.812158Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, ... PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1710 count 10 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199649Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1720 count 10 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199653Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1730 count 10 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199659Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1740 count 10 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199663Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1750 count 15 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199667Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1765 count 8 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199674Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1773 count 8 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199677Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1781 count 10 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199681Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:371: Deleting head blob in L1. Partition 0 offset 1791 count 6 actorID [2:7564824063671356102:2472] 2025-10-24T16:20:58.199709Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1465 partno 0 count 5 parts 0 suffix '63' size 250533 2025-10-24T16:20:58.199721Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1470 partno 0 count 8 parts 0 suffix '63' size 400785 2025-10-24T16:20:58.199730Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1478 partno 0 count 5 parts 0 suffix '63' size 250533 2025-10-24T16:20:58.199739Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1483 partno 0 count 10 parts 0 suffix '63' size 500953 2025-10-24T16:20:58.199748Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1493 partno 0 count 7 parts 0 suffix '63' size 350701 2025-10-24T16:20:58.199756Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1500 partno 0 count 1 parts 0 suffix '63' size 50157 2025-10-24T16:20:58.199760Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1501 partno 0 count 9 parts 0 suffix '63' size 450870 2025-10-24T16:20:58.199763Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72075186224037895' partition 0 offset 1510 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199769Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1520 partno 0 count 11 parts 0 suffix '63' size 551038 2025-10-24T16:20:58.199774Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1531 partno 0 count 9 parts 0 suffix '63' size 450870 2025-10-24T16:20:58.199777Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1540 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199782Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1550 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199788Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1560 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199791Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1570 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199795Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1580 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199797Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1590 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199802Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1600 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199809Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1601 partno 0 count 9 parts 0 suffix '63' size 450869 2025-10-24T16:20:58.199812Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1610 partno 0 count 10 parts 0 suffix '63' size 500953 2025-10-24T16:20:58.199817Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1620 partno 0 count 10 parts 0 suffix '63' size 500953 2025-10-24T16:20:58.199822Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1630 partno 0 count 10 parts 0 suffix '63' size 500953 2025-10-24T16:20:58.199826Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1640 partno 0 count 10 parts 0 suffix '63' size 500953 2025-10-24T16:20:58.199830Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1650 partno 0 count 10 parts 0 suffix '63' size 500953 2025-10-24T16:20:58.199839Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1660 partno 0 count 5 parts 0 suffix '63' size 250533 2025-10-24T16:20:58.199843Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72075186224037895' partition 0 offset 1665 partno 0 count 5 parts 0 suffix '63' size 250533 2025-10-24T16:20:58.199867Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1670 partno 0 count 5 parts 0 suffix '63' size 250533 2025-10-24T16:20:58.199878Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1675 partno 0 count 4 parts 0 suffix '63' size 200449 2025-10-24T16:20:58.199882Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1679 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199890Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1680 partno 0 count 4 parts 0 suffix '63' size 200449 2025-10-24T16:20:58.199909Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1684 partno 0 count 2 parts 0 suffix '63' size 100281 2025-10-24T16:20:58.199918Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1686 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199922Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1687 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199926Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1688 partno 0 count 2 parts 0 suffix '63' size 100281 2025-10-24T16:20:58.199930Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1690 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199935Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1691 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199940Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1692 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199945Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1693 partno 0 count 2 parts 0 suffix '63' size 100281 2025-10-24T16:20:58.199953Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1695 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199957Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1696 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199964Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1697 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199967Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1698 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199971Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1699 partno 0 count 1 parts 0 suffix '63' size 50156 2025-10-24T16:20:58.199975Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72075186224037895' partition 0 offset 1700 partno 0 count 1 parts 0 suffix '63' size 50157 2025-10-24T16:20:58.199980Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1701 partno 0 count 9 parts 0 suffix '63' size 450870 2025-10-24T16:20:58.199988Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1710 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199993Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1720 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.199997Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1730 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.200006Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1740 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.200010Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1750 partno 0 count 15 parts 0 suffix '63' size 751488 2025-10-24T16:20:58.200015Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1765 partno 0 count 8 parts 0 suffix '63' size 400786 2025-10-24T16:20:58.200023Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1773 partno 0 count 8 parts 0 suffix '63' size 400786 2025-10-24T16:20:58.200027Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037895' partition 0 offset 1781 partno 0 count 10 parts 0 suffix '63' size 500954 2025-10-24T16:20:58.200036Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72075186224037895' partition 0 offset 1791 partno 0 count 6 parts 0 suffix '63' size 300618 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete >> ShowCreateView::WithPairedTablePathPrefix [GOOD] >> ShowCreateView::WithTwoTablePathPrefixes >> AuthSystemView::AuthGroupMembers_TableRange [GOOD] >> AuthSystemView::AuthOwners+EnableRealSystemViewPaths >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::TopPartitionsByCpuFields >> SystemView::GroupsFields [GOOD] >> SystemView::Describe+EnableRealSystemViewPaths >> SystemView::QueryStatsFields [GOOD] >> SystemView::PartitionStatsTtlFields >> IndexBuildTestReboots::DropIndexWithDataColumns [GOOD] >> IndexBuildTestReboots::DropIndexWithDataColumnsUniq [GOOD] >> SystemView::TopPartitionsByCpuTables [GOOD] >> SystemView::TopPartitionsByCpuRanges >> SystemView::ShowCreateTableReadReplicas [GOOD] >> IndexBuildTestReboots::DropIndex [GOOD] >> SystemView::ShowCreateTableKeyBloomFilter |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium >> IndexBuildTestReboots::DropIndexUniq >> AuthSystemView::AuthPermissions [GOOD] >> TxUsage::Write_And_Read_Gigant_Messages_2 [GOOD] >> AuthSystemView::AuthPermissions_Access >> SystemView::Describe+EnableRealSystemViewPaths [GOOD] >> SystemView::Describe-EnableRealSystemViewPaths ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::DropIndexWithDataColumns [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.749218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.749240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.749246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.749254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 
2025-10-24T16:20:33.749260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.749265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.749274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.749290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.749418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.749477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.765774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.765804Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.765889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.768059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.768116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.768147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.770093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.770190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.770273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.770325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.770971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.770999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.771149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.771154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.771175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.771180Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.771185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.771199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.772212Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.788530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.788600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.788646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.788653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.788707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.788738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.789376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.789415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.789456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.789464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.789472Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.789476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.789838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.789847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.789852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.790162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.790171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.790175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.790180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.790691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.790967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.790988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.791117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.791133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
hing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:21:01.801227Z node 109 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-10-24T16:21:01.801379Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.801392Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.801720Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.801733Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.802459Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.802854Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5943: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 468151437594 } TabletId: 72075186233409546 State: 4 2025-10-24T16:21:01.802871Z node 109 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-10-24T16:21:01.803152Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:21:01.803168Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:21:01.803239Z node 109 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:21:01.803275Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:21:01.803328Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-10-24T16:21:01.804354Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:21:01.804366Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-10-24T16:21:01.804379Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:21:01.804385Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-10-24T16:21:01.804391Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:21:01.804953Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:21:01.804966Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-10-24T16:21:01.805047Z node 109 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-10-24T16:21:01.805102Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-10-24T16:21:01.805108Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-10-24T16:21:01.805163Z node 109 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:21:01.805179Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:21:01.805184Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [109:582:2543] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:21:01.805252Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:01.805295Z node 109 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 52us result status StatusSuccess 2025-10-24T16:21:01.805406Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 
TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:01.805484Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:01.805505Z node 109 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0" took 23us result status StatusPathDoesNotExist 2025-10-24T16:21:01.805524Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table/UserDefinedIndexByValue0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:21:01.805567Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:01.805596Z node 109 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" took 31us result status StatusPathDoesNotExist 2025-10-24T16:21:01.805612Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> AuthSystemView::AuthOwners+EnableRealSystemViewPaths [GOOD] >> AuthSystemView::AuthEffectivePermissions+EnableRealSystemViewPaths >> SystemView::Describe-EnableRealSystemViewPaths [GOOD] >> SystemView::DescribeSystemFolder+EnableRealSystemViewPaths >> SystemView::ShowCreateTable [GOOD] >> SystemView::ShowCreateTableChangefeeds >> ShowCreateView::WithTwoTablePathPrefixes [GOOD] >> SystemView::CollectPreparedQueries >> TxUsage::Write_And_Read_Gigant_Messages_1 [GOOD] >> ydb-tests-sql::import_test [GOOD] |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging >> AuthSystemView::AuthPermissions_Access [GOOD] >> AuthSystemView::AuthPermissions_ResultOrder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_And_Read_Gigant_Messages_2 [GOOD] Test command err: 2025-10-24T16:20:42.119010Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824007335381519:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:42.119203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:42.123631Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001390/r3tmp/tmpwNOpWk/pdisk_1.dat 2025-10-24T16:20:42.148797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:42.157838Z node 1 
:IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:42.158017Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824007335381491:2081] 1761322842118754 != 1761322842118757 TServer::EnableGrpc on GrpcPort 15929, node 1 2025-10-24T16:20:42.164119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001390/r3tmp/yandexX0dkZi.tmp 2025-10-24T16:20:42.164131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001390/r3tmp/yandexX0dkZi.tmp 2025-10-24T16:20:42.164183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001390/r3tmp/yandexX0dkZi.tmp 2025-10-24T16:20:42.164216Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:42.166823Z INFO: TTestServer started on Port 29956 GrpcPort 15929 TClient is connected to server localhost:29956 PQClient connected to localhost:15929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:42.222241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:42.222267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:42.223421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:42.226030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-10-24T16:20:42.230605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 
2025-10-24T16:20:42.397049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007335382307:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007335382288:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007335382311:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:42.397842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007335382341:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824007335382344:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.397882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.399308Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824007335382310:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:42.428072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.430183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:42.434124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.445788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7564824007335382665:2611] 2025-10-24T16:20:42.498506Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824007335382666:2612] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:20:43.120708Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:47.119466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824007335381519:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:47.119511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:47.704317Z :Write_And_Read_Huge_Messages_1 INFO: TTopicSdkTestSetup started 2025-10-24T16:20:47.707469Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:47.711819Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824028810219412:2729] connected; active server actors: 1 2025-10-24T16:20:47.711930Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:47.712316Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:47.712348Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, ... e = 61000206 to budget 2025-10-24T16:21:02.200581Z :DEBUG: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] [] In ContinueReadingDataImpl, ReadSizeBudget = 61000206, ReadSizeServerDelta = -8571406 2025-10-24T16:21:02.200672Z :DEBUG: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-10-24T16:21:02.202019Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.202034Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.202037Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.204045Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 grpc read done: success# 1, data# { read_request { bytes_size: 61000206 } } 2025-10-24T16:21:02.204120Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 got read request: guid# 2ba8838b-db083151-5122d5d5-b0515ece 2025-10-24T16:21:02.226178Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.226194Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.226197Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.303990Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.304005Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.304009Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.331036Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.331050Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.331052Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.403732Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.403744Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.403746Z 
node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.433643Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.433658Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.433659Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.491490Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2401: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 checking auth because of timeout 2025-10-24T16:21:02.491537Z node 2 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 auth for : test-consumer 2025-10-24T16:21:02.491753Z node 2 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 Handle describe topics response 2025-10-24T16:21:02.491787Z node 2 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 auth is DEAD 2025-10-24T16:21:02.491795Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:1046: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 auth ok: topics# 1, initDone# 1 2025-10-24T16:21:02.507634Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.507651Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.507653Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.533665Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.533680Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.533683Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.608026Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.608040Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.608042Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.635942Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.635957Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.635959Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.675309Z :DEBUG: [/Root] Take Data. 
Partition 0. Read: {0, 0} (3-3) 2025-10-24T16:21:02.675342Z :DEBUG: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] [] The application data is transferred to the client. Number of messages 1, size 61000000 bytes 2025-10-24T16:21:02.675357Z :DEBUG: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] [] Returning serverBytesSize = 0 to budget 0 1 2025-10-24T16:21:02.681410Z :DEBUG: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] [] Commit offsets [3, 4). Partition stream id: 1 2025-10-24T16:21:02.681802Z :INFO: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] Closing read session. Close timeout: 0.000000s 2025-10-24T16:21:02.681818Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:3:3 2025-10-24T16:21:02.681827Z :INFO: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2210 BytesRead: 244000000 MessagesRead: 4 BytesReadCompressed: 244000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:21:02.681848Z :NOTICE: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-10-24T16:21:02.681857Z :DEBUG: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] [] Abort session to cluster 2025-10-24T16:21:02.681986Z :NOTICE: [/Root] [/Root] [e5ddefb1-ade639c6-5c6e6592-d944840] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:21:02.684097Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 3 end: 4 } } } } 2025-10-24T16:21:02.684173Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 grpc closed 2025-10-24T16:21:02.684187Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_2_1_18225208489016270276_v1 is DEAD 2025-10-24T16:21:02.684605Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037895][topic_A] pipe [2:7564824082803446481:2559] disconnected. 2025-10-24T16:21:02.684609Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037895][topic_A] pipe [2:7564824082803446481:2559] disconnected; active server actors: 1 2025-10-24T16:21:02.684612Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037895][topic_A] pipe [2:7564824082803446481:2559] client test-consumer disconnected session test-consumer_2_1_18225208489016270276_v1 2025-10-24T16:21:02.684641Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037894] Destroy direct read session test-consumer_2_1_18225208489016270276_v1 2025-10-24T16:21:02.684647Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [2:7564824082803446485:2563] destroyed 2025-10-24T16:21:02.684663Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_2_1_18225208489016270276_v1 2025-10-24T16:21:02.716009Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.716026Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.716028Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.737563Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.737590Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.737593Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.818067Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.818086Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.818089Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:02.841230Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:02.841244Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching 
state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:02.841247Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist |57.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::CollectScanQueries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::DropIndexWithDataColumnsUniq [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.688943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.688965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.688969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.688976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.688982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.688985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.688992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.689004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.689118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.689169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.703239Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.703264Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.703347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.705725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.705783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.705813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.707959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.708057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.708150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.708200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.708954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.708985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.709155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.709163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.709186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.709192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.709196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.709213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.710291Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.725452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.725544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.725601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.725608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.725660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.725693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.726419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.726464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.726513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.726522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.726527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.726530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.726875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.726882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.726886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.727117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.727123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:20:33.727127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.727133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.727621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.728009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.728047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.728212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.728230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
hing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:21:01.931569Z node 109 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-10-24T16:21:01.931681Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.931691Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.931958Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.931969Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.932491Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:01.932762Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5943: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 468151437594 } TabletId: 72075186233409546 State: 4 2025-10-24T16:21:01.932781Z node 109 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-10-24T16:21:01.933034Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:21:01.933047Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:21:01.933099Z node 109 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:21:01.933124Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:21:01.933161Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-10-24T16:21:01.933815Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:21:01.933829Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-10-24T16:21:01.933842Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:21:01.933847Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-10-24T16:21:01.933851Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:21:01.934275Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:21:01.934288Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-10-24T16:21:01.934373Z node 109 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-10-24T16:21:01.934428Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-10-24T16:21:01.934433Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-10-24T16:21:01.934474Z node 109 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:21:01.934487Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:21:01.934491Z node 109 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [109:582:2543] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:21:01.934542Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:01.934573Z node 109 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 38us result status StatusSuccess 2025-10-24T16:21:01.934659Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 
TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:01.934709Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:01.934724Z node 109 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0" took 17us result status StatusPathDoesNotExist 2025-10-24T16:21:01.934737Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table/UserDefinedIndexByValue0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:21:01.934768Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:01.934776Z node 109 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" took 10us result status StatusPathDoesNotExist 2025-10-24T16:21:01.934786Z node 109 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> ydb-tests-stress-show_create-view-tests::import_test [GOOD] >> ydb-tests-fq-streaming_optimize::import_test [GOOD] >> ydb-tests-functional-canonical::import_test [GOOD] >> SystemView::DescribeSystemFolder+EnableRealSystemViewPaths [GOOD] >> SystemView::DescribeSystemFolder-EnableRealSystemViewPaths >> ydb-tests-library-ut::import_test [GOOD] >> tests-stress-scheme_board-pile_promotion-tests::import_test [GOOD] >> SystemView::CollectScanQueries [GOOD] >> SystemView::CollectScriptingQueries >> AuthSystemView::AuthEffectivePermissions+EnableRealSystemViewPaths [GOOD] >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] >> SystemView::ShowCreateTableTtlSettings >> AuthSystemView::AuthEffectivePermissions-EnableRealSystemViewPaths |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/show_create/view/tests/import_test >> ydb-tests-stress-show_create-view-tests::import_test [GOOD] |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/import_test >> ydb-tests-fq-streaming_optimize::import_test [GOOD] |57.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] >> ydb-tests-functional-postgresql::import_test [GOOD] |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_And_Read_Gigant_Messages_1 [GOOD] Test command err: 2025-10-24T16:20:41.979914Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824001724844247:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:41.979931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:41.982964Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/001398/r3tmp/tmpTj74yQ/pdisk_1.dat 2025-10-24T16:20:42.011042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:42.018694Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:42.018866Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824001724844223:2081] 1761322841979784 != 1761322841979787 TServer::EnableGrpc on GrpcPort 3866, node 1 2025-10-24T16:20:42.024942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001398/r3tmp/yandexRHa2H2.tmp 2025-10-24T16:20:42.024952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001398/r3tmp/yandexRHa2H2.tmp 2025-10-24T16:20:42.025001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001398/r3tmp/yandexRHa2H2.tmp 2025-10-24T16:20:42.025035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:42.027518Z INFO: TTestServer started on Port 23011 GrpcPort 3866 TClient is connected to server localhost:23011 PQClient connected to localhost:3866 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:42.047392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-10-24T16:20:42.079998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:20:42.086903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:42.086924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:42.088078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-10-24T16:20:42.215662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:42.272064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824006019812328:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.272084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824006019812342:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.272099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.272228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824006019812359:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.272255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.272871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:42.273012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824006019812389:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.273026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.273056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824006019812391:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.273064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:42.274727Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824006019812358:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:42.305150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.313007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:42.328693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-10-24T16:20:42.373700Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824006019812671:2594] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7564824006019812700:2616] 2025-10-24T16:20:42.981840Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:46.980072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824001724844247:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:46.980109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:47.575771Z :Write_And_Read_Big_Messages_2 INFO: TTopicSdkTestSetup started 2025-10-24T16:20:47.578993Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:47.583142Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824027494649435:2726] connected; active server actors: 1 2025-10-24T16:20:47.583277Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:47.583553Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:47.583587Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 720 ... tching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.231555Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.231557Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.326245Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.326258Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.326261Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.331951Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.331964Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.331967Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.426392Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.426410Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.426412Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.433189Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.433204Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.433207Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.528821Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.528833Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.528836Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.532864Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.532878Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.532881Z node 2 :PERSQUEUE DEBUG: 
partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.630504Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.630520Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.630523Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.635601Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.635613Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.635615Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.735958Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.735972Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.735975Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.736250Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.736257Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.736259Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.840019Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.840034Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.840038Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.840062Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.840064Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.840066Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.900080Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2401: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 checking auth because of timeout 2025-10-24T16:21:05.900117Z node 2 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 auth for : test-consumer 2025-10-24T16:21:05.900453Z node 2 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 Handle describe topics response 
2025-10-24T16:21:05.900479Z node 2 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 auth is DEAD 2025-10-24T16:21:05.900498Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:1046: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 auth ok: topics# 1, initDone# 1 2025-10-24T16:21:05.921722Z :INFO: [/Root] [/Root] [57897dba-76d17ce1-fbc973a0-f77dc51b] Closing read session. Close timeout: 0.000000s 2025-10-24T16:21:05.921745Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:3:4 2025-10-24T16:21:05.921759Z :INFO: [/Root] [/Root] [57897dba-76d17ce1-fbc973a0-f77dc51b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 6053 BytesRead: 244000000 MessagesRead: 4 BytesReadCompressed: 244000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:21:05.921779Z :NOTICE: [/Root] [/Root] [57897dba-76d17ce1-fbc973a0-f77dc51b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-10-24T16:21:05.921788Z :DEBUG: [/Root] [/Root] [57897dba-76d17ce1-fbc973a0-f77dc51b] [] Abort session to cluster 2025-10-24T16:21:05.922085Z :NOTICE: [/Root] [/Root] [57897dba-76d17ce1-fbc973a0-f77dc51b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:21:05.922978Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 grpc read done: success# 0, data# { } 2025-10-24T16:21:05.922997Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 grpc read failed 2025-10-24T16:21:05.923005Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 grpc closed 2025-10-24T16:21:05.923147Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_2_1_13766917910909709918_v1 is DEAD 2025-10-24T16:21:05.923495Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037895][topic_A] pipe [2:7564824077178619271:2515] disconnected. 2025-10-24T16:21:05.923498Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037895][topic_A] pipe [2:7564824077178619271:2515] disconnected; active server actors: 1 2025-10-24T16:21:05.923500Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037895][topic_A] pipe [2:7564824077178619271:2515] client test-consumer disconnected session test-consumer_2_1_13766917910909709918_v1 2025-10-24T16:21:05.923524Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037894] Destroy direct read session test-consumer_2_1_13766917910909709918_v1 2025-10-24T16:21:05.923532Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [2:7564824077178619274:2518] destroyed 2025-10-24T16:21:05.923546Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_2_1_13766917910909709918_v1 2025-10-24T16:21:05.941637Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.941654Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.941656Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:05.941678Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:05.941681Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:05.941683Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:21:06.044162Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:06.044181Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:06.044185Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:21:06.044221Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:06.044223Z node 2 
:PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:06.044225Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/scheme_board/pile_promotion/tests/import_test >> tests-stress-scheme_board-pile_promotion-tests::import_test [GOOD] |57.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] >> SystemView::CollectScriptingQueries [GOOD] >> SysViewsRegistry::SysViews [GOOD] >> SysViewsRegistry::RewrittenSysViews [GOOD] >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |56.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD] >> SystemView::DescribeSystemFolder-EnableRealSystemViewPaths [GOOD] >> SystemView::DescribeAccessDenied >> ydb-tests-fq-yds::import_test [GOOD] >> AuthSystemView::AuthPermissions_ResultOrder [GOOD] >> AuthSystemView::AuthPermissions_Selects >> ydb-tests-functional-scheme_shard::import_test [GOOD] |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer >> AuthSystemView::AuthEffectivePermissions-EnableRealSystemViewPaths [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> SysViewsRegistry::RewrittenSysViews [GOOD] Test command err: 2025-10-24T16:20:47.161086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824027241884100:2154];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:47.161138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00163d/r3tmp/tmpipFfZr/pdisk_1.dat 2025-10-24T16:20:47.201463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:47.215077Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15278, node 1 2025-10-24T16:20:47.234843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:47.234859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:47.234860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:47.234905Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15579 2025-10-24T16:20:47.261932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:47.261967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:47.268228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:15579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:47.286526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:47.370070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:47.508169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824027241885080:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.508176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824027241885072:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.508194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.508271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824027241885087:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.508288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.509087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:47.513051Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824027241885086:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:20:47.575154Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824027241885167:2744] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:20:48.162853Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:48.164102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:48.249423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:20:48.301783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:48.369821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:48.435503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:20:48.492255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:20:48.542271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:48.555260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-10-24T16:20:48.966283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:20:49.315269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715710:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:20:49.328837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:49.363182Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7564824035831822258:2860], owner: [1:7564824035831822255:2858], scan id: 0, sys view info: Type: EShowCreate 2025-10-24T16:20:49.363301Z node 1 :SYSTEM_VIEWS INFO: show_create.cpp:107: Scan prepared, actor: [1:7564824035831822258:2860] 2025-10-24T16:20:49.365120Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7564824035831822258:2860], row count: 1, finished: 1 2025-10-24T16:20:49.365141Z node 1 :SYSTEM_VIE ... ault, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:21:07.282939Z node 27 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [27:7564824112972354732:2388] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:07.314463Z node 27 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgc6gcbeqktmpdatn2w2xv, Database: , SessionId: ydb://session/3?node_id=27&id=NjFmZTg1ZTktYzE5MDRhYTMtNWY0NzlhMjUtNzM0ZDU2NWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:07.368954Z node 27 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgc6m7ahjap0q47hmd1fsk, Database: , SessionId: ydb://session/3?node_id=27&id=YjZlMjg1Y2UtZWM0Nzg2MzItNWFmNTk0NmItYzVlZTJiZg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:07.370931Z node 27 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322867409, txId: 281474976715662] shutting down 2025-10-24T16:21:07.428615Z node 27 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgc6nr53a73maaav6qt26t, Database: , SessionId: ydb://session/3?node_id=27&id=MWZkMDJmZDAtMTZjY2IzYy1iYzcwYmIzZC1mNTM2MzIxYg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:07.429477Z node 27 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [27:7564824112972354855:2352], owner: [27:7564824112972354852:2350], scan id: 0, sys view info: Type: ETopQueriesByReadBytesOneMinute SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:07.441238Z node 27 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [27:7564824112972354855:2352], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:07.441352Z node 27 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [27:7564824112972354855:2352], row count: 2, finished: 1 2025-10-24T16:21:07.441385Z node 27 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [27:7564824112972354855:2352], owner: [27:7564824112972354852:2350], scan id: 0, sys view info: Type: ETopQueriesByReadBytesOneMinute SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:07.442375Z node 27 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322867421, txId: 281474976715664] shutting down 2025-10-24T16:21:07.920168Z node 28 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:07.920211Z node 28 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/00163d/r3tmp/tmpDMHLwn/pdisk_1.dat 2025-10-24T16:21:07.988706Z node 28 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24328, node 28 2025-10-24T16:21:08.020308Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:08.020333Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:08.024148Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:08.135380Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:08.135391Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:08.135393Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:08.135450Z node 28 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:08.204582Z node 28 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:21:08.269353Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:08.274502Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:21:08.304274Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:21:08.656548Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7564824116828944743:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:08.656565Z node 28 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:08.656664Z node 28 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [28:7564824116828944755:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:08.657352Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:08.659973Z node 28 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [28:7564824116828944757:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:21:08.728666Z node 28 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [28:7564824116828944818:2392] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:08.758651Z node 28 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgc7xg7bby3yzjn5fzaweq, Database: , SessionId: ydb://session/3?node_id=28&id=NWM3YTNmNmYtMzU3YWZmNTItOWI3NDNlOWYtZDUwMTY1Y2E=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:08.822011Z node 28 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:08.840703Z node 28 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgc81x9te9yhv0n7nt5mqm, Database: , SessionId: ydb://session/3?node_id=28&id=ZmQwYTk3NWMtNjZlYTdkOWQtYjRmMTY2M2QtYmYwYTBmMjE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:08.849247Z node 28 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322868879, txId: 281474976715662] shutting down 2025-10-24T16:21:08.884401Z node 28 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgc84032m99jw6vbjmw6pd, Database: , SessionId: ydb://session/3?node_id=28&id=MWViZDQzZDQtYzQzNjBlMjUtM2Y3M2VjNjctNDlkNzY1NjM=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:21:08.885111Z node 28 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [28:7564824116828944940:2354], owner: [28:7564824116828944936:2352], scan id: 0, sys view info: Type: ETopQueriesByReadBytesOneMinute SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:08.892141Z node 28 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [28:7564824116828944940:2354], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:08.892241Z node 28 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [28:7564824116828944940:2354], row count: 2, finished: 1 2025-10-24T16:21:08.892265Z node 28 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [28:7564824116828944940:2354], owner: [28:7564824116828944936:2352], scan id: 0, sys view info: Type: ETopQueriesByReadBytesOneMinute SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:08.893090Z node 28 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322868880, txId: 281474976715664] shutting down |55.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] >> SystemView::TopPartitionsByCpuFields [GOOD] >> SystemView::SystemViewFailOps+EnableRealSystemViewPaths |55.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] >> SystemView::ShowCreateTableChangefeeds [GOOD] >> SystemView::ShowCreateTableColumnAlterColumn |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |55.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut >> SystemView::DescribeAccessDenied [GOOD] >> AuthSystemView::AuthPermissions_Selects [GOOD] >> SystemView::SystemViewFailOps+EnableRealSystemViewPaths [GOOD] >> SystemView::SystemViewFailOps-EnableRealSystemViewPaths >> ydb-tests-stress-kv-tests::import_test [GOOD] >> SystemView::TopPartitionsByCpuRanges [GOOD] >> SystemView::TopPartitionsByCpuFollowers |53.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |53.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> SystemView::SystemViewFailOps-EnableRealSystemViewPaths [GOOD] >> SystemView::TabletsFields >> ydb-tests-functional-hive::import_test [GOOD] |53.5%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |53.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |53.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet |53.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsShards |53.4%| RESOURCE $(sbr:770480022) - 6.41 MB >> 
ydb-tests-functional-secrets::import_test [GOOD] |53.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] |53.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |53.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/import_test >> ydb-tests-functional-secrets::import_test [GOOD] |53.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> ydb-tests-stress-node_broker-tests::import_test [GOOD] >> SystemView::TabletsShards [GOOD] >> SystemView::TabletsFollowers >> SystemView::ShowCreateTableColumnAlterColumn [GOOD] >> SystemView::ShowCreateTableColumnUpsertOptions >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |53.0%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose >> ydb-tests-functional-clickbench::import_test [GOOD] >> ydb-tests-functional-scheme_tests::import_test [GOOD] |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/node_broker/tests/import_test >> ydb-tests-stress-node_broker-tests::import_test [GOOD] |53.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> TGRpcCmsTest::DescribeOptionsTest >> ydb-tests-olap-delete::import_test [GOOD] >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so >> ydb-tests-solomon-reading::import_test [GOOD] >> ydb-tests-datashard-select::import_test [GOOD] >> ydb-tests-fq-http_api::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] >> ydb-tests-stress-viewer-tests::import_test [GOOD] >> ydb-tests-functional-ttl::import_test [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |53.0%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> AuthSystemView::AuthEffectivePermissions-EnableRealSystemViewPaths [GOOD] Test command err: 2025-10-24T16:20:45.361656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824017347613530:2082];send_to=[0:7307199536658146131:7762515]; 
2025-10-24T16:20:45.361678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001621/r3tmp/tmpevEhKb/pdisk_1.dat 2025-10-24T16:20:45.396476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:45.408622Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63503, node 1 2025-10-24T16:20:45.423354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:45.423364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:45.423366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:45.423399Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:45.463551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:45.463591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:45.467758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:20:45.481242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-10-24T16:20:45.481296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-10-24T16:20:45.481320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-10-24T16:20:45.481363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-10-24T16:20:45.481373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976715658:0 type: TxCreateExtSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 2] source path: 2025-10-24T16:20:45.481396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-10-24T16:20:45.481416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:20:45.481421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:20:45.481440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-10-24T16:20:45.481445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-10-24T16:20:45.482104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-10-24T16:20:45.482151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-10-24T16:20:45.482191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-10-24T16:20:45.482200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-10-24T16:20:45.482234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:20:45.482255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-10-24T16:20:45.482258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7564824017347614079:2375], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-10-24T16:20:45.482260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7564824017347614079:2375], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-10-24T16:20:45.482265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-10-24T16:20:45.482269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-10-24T16:20:45.482272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715658:0, at tablet# 72057594046644480 2025-10-24T16:20:45.482276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 2025-10-24T16:20:45.482821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 waiting... 
2025-10-24T16:20:45.483317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.483335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.483337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-10-24T16:20:45.483339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-10-24T16:20:45.483342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-10-24T16:20:45.483376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.483389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.483390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-10-24T16:20:45.483392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-10-24T16:20:45.483394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-10-24T16:20:45.483399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-10-24T16:20:45.483535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:20:45.483541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-10-24T16:20:45.483544Z node ... 
1:11.180189Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [.metadata,Dir2,Table1] }] } 2025-10-24T16:21:11.180207Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 1, finished: 0 2025-10-24T16:21:11.184363Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:11.188215Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata TableId: [72075186224037888:4:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [workload_manager] }] } 2025-10-24T16:21:11.188242Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 1, finished: 0 2025-10-24T16:21:11.188252Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata/workload_manager TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:11.193361Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata/workload_manager TableId: [72075186224037888:5:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 
TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [pools] }] } 2025-10-24T16:21:11.193394Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 1, finished: 0 2025-10-24T16:21:11.193445Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata/workload_manager/pools TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:11.196010Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata/workload_manager/pools TableId: [72075186224037888:6:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [default] }] } 2025-10-24T16:21:11.196038Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 1, finished: 0 2025-10-24T16:21:11.196052Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:11.200329Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/.metadata/workload_manager/pools/default TableId: [72075186224037888:7:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-10-24T16:21:11.200376Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 5, finished: 0 2025-10-24T16:21:11.203745Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet 
[{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:11.204184Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-10-24T16:21:11.204203Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 2, finished: 0 2025-10-24T16:21:11.204211Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:11.208146Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root/Tenant1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-10-24T16:21:11.208168Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824132003823398:2389], row count: 1, finished: 0 2025-10-24T16:21:11.208349Z node 51 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [51:7564824132003823398:2389], owner: [51:7564824132003823395:2387], scan id: 0, sys view info: Type: EAuthEffectivePermissions SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:11.208969Z node 51 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322871163, txId: 281474976715680] shutting down 2025-10-24T16:21:11.208993Z node 51 :SYSTEM_VIEWS TRACE: sysview_service.cpp:901: Collect query stats: service id# [51:7564824119118919612:2079], database# /Root/Tenant1, query hash# 17041527774448355266, cpu time# 53755 2025-10-24T16:21:11.226686Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 52 
2025-10-24T16:21:11.226805Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(52, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:11.226828Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 54 2025-10-24T16:21:11.226878Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(54, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:11.226888Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 55 2025-10-24T16:21:11.226922Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(55, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:11.226934Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 53 2025-10-24T16:21:11.226995Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(53, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:11.225676Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:11.226385Z node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 >> ydb-tests-functional-ydb_cli::import_test [GOOD] >> SystemView::TabletsRanges [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/delete/import_test >> ydb-tests-olap-delete::import_test [GOOD] >> ydb-tests-functional-cms::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so >> ydb-tests-functional-autoconfig::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |52.9%| RESOURCE $(sbr:4966407557) >> SystemView::ShowCreateTableTtlSettings [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> SystemView::ShowCreateTableTemporary |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest >> ydb-tests-functional-tpc-medium::import_test [GOOD] |52.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/select/import_test >> ydb-tests-datashard-select::import_test [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/solomon/reading/import_test >> ydb-tests-solomon-reading::import_test [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/viewer/tests/import_test >> ydb-tests-stress-viewer-tests::import_test [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2025-10-24T16:21:20.229420Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824170871818761:2089];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:20.232086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0015be/r3tmp/tmpAvOT24/pdisk_1.dat 2025-10-24T16:21:20.287423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:20.300996Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28562, node 1 2025-10-24T16:21:20.316075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:20.316085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:20.316087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:20.316128Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:20.333171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:20.333202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:20.335631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:20.337675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:31069 2025-10-24T16:21:20.361773Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2025-10-24T16:21:20.361786Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2025-10-24T16:21:20.363267Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2025-10-24T16:21:20.393152Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7564824170871819469:2301], Recipient [1:7564824170871819171:2198]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:60520" } 2025-10-24T16:21:20.393171Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-10-24T16:21:20.393604Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3326: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } 2025-10-24T16:21:20.536726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/import_test >> ydb-tests-functional-cms::import_test [GOOD] |52.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |52.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> ydb-tests-fq-s3::import_test [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> 
TBlobStorageProxyTest::TestProxySimpleDiscover >> DataStreams::TestGetRecordsStreamWithSingleShard >> GroupWriteTest::WriteHardRateDispatcher |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so >> TBlobStorageProxyTest::TestVPutVGetPersistence |52.9%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... library/recipes/docker_compose/bin/docker-compose} |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> SystemView::ShowCreateTableColumnUpsertOptions [GOOD] |52.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |52.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TBlobStorageProxyTest::TestPutGetMany >> SystemView::ShowCreateTableColumnUpsertIndex >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] |51.8%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |51.8%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> ydb-tests-datashard-parametrized_queries::import_test [GOOD] |51.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] >> TBlobStorageProxyTest::TestBlock >> TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] |51.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TBlobStorageProxyTest::TestDoubleEmptyGet >> SystemView::ShowCreateTableTemporary [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |51.7%| [AR] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> SystemView::ShowCreateTableSequences |51.7%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups >> TTxAllocatorClientTest::AllocateOverTheEdge >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] |51.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/import_test >> ydb-tests-datashard-parametrized_queries::import_test [GOOD] |51.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TTxAllocatorClientTest::Boot >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> 
TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |51.6%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql >> TTxAllocatorClientTest::InitiatingRequest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TKeyValueTracingTest::ReadSmall >> TBlobStorageProxyTest::TestPartialGetMirror |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD] |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |51.6%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |51.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TTxAllocatorClientTest::Boot [GOOD] |51.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |51.6%| [AR] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/no_llvm/libdq-comp_nodes-no_llvm.a >> TTxAllocatorClientTest::InitiatingRequest [GOOD] |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-10-24T16:20:48.385216Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824033219041102:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:48.385244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00167b/r3tmp/tmpTWTMMn/pdisk_1.dat 2025-10-24T16:20:48.412469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:48.425759Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9786, node 1 2025-10-24T16:20:48.440187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:48.440201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:48.440203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:48.440248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14590 TClient is connected to server localhost:14590 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:20:48.487272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:48.487294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:48.491143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:48.560081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:48.564598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) waiting... 2025-10-24T16:20:48.568960Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7564824033910691265:2173];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:48.568991Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:48.569348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:48.569375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-10-24T16:20:48.570442Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-10-24T16:20:48.570651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:48.571153Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant1/.metadata/script_executions 2025-10-24T16:20:48.580361Z node 5 :SYSTEM_VIEWS INFO: processor_impl.cpp:41: [72075186224037893] OnActivateExecutor 2025-10-24T16:20:48.580375Z node 5 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:15: [72075186224037893] TTxInitSchema::Execute 2025-10-24T16:20:48.586161Z node 5 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:42: [72075186224037893] TTxInitSchema::Complete 2025-10-24T16:20:48.586182Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:136: [72075186224037893] TTxInit::Execute 2025-10-24T16:20:48.586264Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:257: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-10-24T16:20:48.586274Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:284: [72075186224037893] Loading interval metrics: query count# 0 2025-10-24T16:20:48.586279Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:362: [72075186224037893] Loading interval query tops: total query count# 0 2025-10-24T16:20:48.586284Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:408: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-10-24T16:20:48.586289Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 6, result count# 0 2025-10-24T16:20:48.586293Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 7, result count# 0 2025-10-24T16:20:48.586298Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 8, result count# 0 2025-10-24T16:20:48.586303Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 9, result count# 0 2025-10-24T16:20:48.586308Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 10, result count# 0 2025-10-24T16:20:48.586317Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 11, result count# 0 2025-10-24T16:20:48.586320Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 12, result count# 0 2025-10-24T16:20:48.586323Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 13, result count# 0 2025-10-24T16:20:48.586328Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 14, result count# 0 2025-10-24T16:20:48.586331Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:51: [72075186224037893] Loading results: table# 15, result count# 0 2025-10-24T16:20:48.586336Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:129: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-10-24T16:20:48.586343Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:129: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-10-24T16:20:48.586348Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:82: [72075186224037893] Loading results: table# 17, result count# 0 2025-10-24T16:20:48.586351Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:82: [72075186224037893] Loading results: table# 18, result count# 0 
2025-10-24T16:20:48.586359Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:82: [72075186224037893] Loading results: table# 20, result count# 0 2025-10-24T16:20:48.586367Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:82: [72075186224037893] Loading results: table# 21, result count# 0 2025-10-24T16:20:48.586392Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:333: [72075186224037893] Reset: interval end# 2025-10-24T16:20:45.000000Z 2025-10-24T16:20:48.586514Z node 5 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:779: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7564824033910691164:2081], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-10-24T16:20:48.586703Z node 5 :SYSTEM_VIEWS DEBUG: partition_stats.cpp:32: NSysView::TPartitionStatsCollector bootstrapped 2025-10-24T16:20:48.586866Z node 5 :SYSTEM_VIEWS INFO: sysview_service.cpp:861: Navigate by path id succeeded: service id# [5:7564824033910691164:2081], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2025-10-24T16:20:48.587015Z node 5 :SYSTEM_VIEWS INFO: sysview_service.cpp:887: Navigate by database succeeded: service id# [5:7564824033910691164:2081], database# /Root/Tenant1, no sysview processor 2025-10-24T16:20:48.587919Z node 5 :SYSTEM_VIEWS DEBUG: tx_init.cpp:488: [72075186224037893] TTxInit::Complete 2025-10-24T16:20:48.588401Z node 5 :SYSTEM_VIEWS DEBUG: tx_configure.cpp:20: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2025-10-24T16:20:48.588421Z node 5 :SYSTEM_VIEWS DEBUG: tx_aggregate.cpp:14: [72075186224037893] TTxAggregate::Execute 2025-10-24T16:20:48.588432Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:136: [72075186224037893] PersistQueryResults: interval end# 2025-10-24T16:20:45.000000Z, query count# 0 2025-10-24T16:20:48.588439Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-10-24T16:20:45.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588442Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-10-24T16:20:45.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588444Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-10-24T16:20:45.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588447Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2025-10-24T16:20:45.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588454Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-10-24T17:00:00.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588457Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-10-24T17:00:00.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588463Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-10-24T17:00:00.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.588467Z node 5 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:105: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-10-24T17:00:00.000000Z, query count# 0, persisted# 0 2025-10-24T16:20:48.589067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: ... 5-10-24T16:21:20.969305Z node 25 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [25:7564824169021590831:2425], owner: [25:7564824169021590828:2423], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:20.970106Z node 25 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322880963, txId: 281474976715677] shutting down 2025-10-24T16:21:20.992644Z node 25 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715680. Ctx: { TraceId: 01k8bgckyb1zg8cf47xx3warv8, Database: , SessionId: ydb://session/3?node_id=25&id=Y2E3ZmEwMGItYzZkNTNhZTEtZWQyY2RmNC0zNWY1NWEzMQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:20.993324Z node 25 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [25:7564824169021590863:2434], owner: [25:7564824169021590860:2432], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:20.995246Z node 25 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [25:7564824169021590863:2434], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:20.995335Z node 25 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [25:7564824169021590863:2434], row count: 4, finished: 1 2025-10-24T16:21:20.995355Z node 25 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [25:7564824169021590863:2434], owner: [25:7564824169021590860:2432], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:20.997108Z node 25 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322880991, txId: 281474976715679] shutting down 2025-10-24T16:21:21.188223Z node 25 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:21.374170Z node 26 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[26:7564824172418467916:2082];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:21.374187Z node 26 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:21.381304Z node 26 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00167b/r3tmp/tmpGnxppn/pdisk_1.dat 2025-10-24T16:21:21.402550Z node 26 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20169, node 26 2025-10-24T16:21:21.435464Z node 26 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:21.435476Z node 26 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2025-10-24T16:21:21.435479Z node 26 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:21.435528Z node 26 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:21.481151Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:21.481178Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:21.482227Z node 26 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:21.482925Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:21.484806Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:21:21.604315Z node 26 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:21.791930Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:21.807952Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7564824172418468783:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:21.807952Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7564824172418468794:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:21.807975Z node 26 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:21.808034Z node 26 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7564824172418468798:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:21.808052Z node 26 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:21.808498Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:21.815229Z node 26 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7564824172418468797:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:21:21.902909Z node 26 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [26:7564824172418468850:2492] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:21.931367Z node 26 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgcmrf3a2wzg44xy71wv5x, Database: , SessionId: ydb://session/3?node_id=26&id=MWI4NTA5ZjUtZTNhMWFkM2UtM2RmNzlhOS1lNzU0ZTc0OA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:21.931808Z node 26 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [26:7564824172418468893:2352], owner: [26:7564824172418468892:2351], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:21.931989Z node 26 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [26:7564824172418468893:2352], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:21.932097Z node 26 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [26:7564824172418468893:2352], row count: 4, finished: 1 2025-10-24T16:21:21.932111Z node 26 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [26:7564824172418468893:2352], owner: [26:7564824172418468892:2351], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:21.932141Z node 26 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [26:7564824172418468899:2355], owner: [26:7564824172418468892:2351], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:21.932247Z node 26 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [26:7564824172418468899:2355], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:21.932537Z node 26 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [26:7564824172418468899:2355], row count: 4, finished: 1 2025-10-24T16:21:21.932559Z node 26 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [26:7564824172418468899:2355], owner: [26:7564824172418468892:2351], scan id: 0, sys view info: Type: ETablets SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:21.933449Z node 26 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322881930, txId: 281474976715661] shutting down >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TKeyValueTracingTest::ReadHuge >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TxUsage::Transactions_Conflict_On_SeqNo_Query [GOOD] >> SystemView::ShowCreateTableColumnUpsertIndex [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> 
SystemView::TopPartitionsByCpuFollowers [GOOD] |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-10-24T16:21:28.052585Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-10-24T16:21:28.052660Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-10-24T16:21:28.052748Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-10-24T16:21:28.053097Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.053191Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-10-24T16:21:28.055123Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055147Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055155Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055165Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-10-24T16:21:28.055189Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055207Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-10-24T16:21:28.055227Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-10-24T16:21:28.055358Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-10-24T16:21:28.055450Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055457Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055467Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-10-24T16:21:28.055471Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 0 to# 5000 2025-10-24T16:21:28.055493Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055513Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055526Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055543Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055561Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-10-24T16:21:28.055625Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055637Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055648Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-10-24T16:21:28.055654Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 5000 to# 10000 2025-10-24T16:21:28.055676Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055695Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055717Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055756Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-10-24T16:21:28.055771Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-10-24T16:21:28.055808Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055815Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.055825Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-10-24T16:21:28.055830Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 10000 to# 15000 2025-10-24T16:21:28.055866Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-10-24T16:21:28.431430Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-10-24T16:21:28.431497Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-10-24T16:21:28.431592Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-10-24T16:21:28.431947Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.432037Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-10-24T16:21:28.433971Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.433999Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.434014Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.434028Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-10-24T16:21:28.434059Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.434081Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-10-24T16:21:28.434105Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |51.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |51.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |51.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |51.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> SystemView::ShowCreateTableColumnAlterObject >> SystemView::TopPartitionsByTliFields |51.5%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |51.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] |51.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |51.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> ydb-tests-datashard-ttl::import_test [GOOD] >> TStorageTenantTest::Empty [GOOD] >> TKeyValueTracingTest::WriteSmall >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::ReadHuge [GOOD] >> TKeyValueTracingTest::ReadSmall [GOOD] >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |51.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |51.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-10-24T16:21:28.779528Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-10-24T16:21:28.779595Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-10-24T16:21:28.779679Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-10-24T16:21:28.779965Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.780036Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-10-24T16:21:28.781594Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.781622Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.781632Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.781646Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-10-24T16:21:28.781675Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.781708Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-10-24T16:21:28.781728Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-10-24T16:21:28.781827Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#5000 2025-10-24T16:21:28.781920Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.781931Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:28.781945Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-10-24T16:21:28.781950Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 0 to# 5000 |51.6%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |51.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |51.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest |51.3%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |51.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TResourcePoolTestReboots::ParallelCreateDrop >> SystemView::ShowCreateTableSequences [GOOD] |51.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |51.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |51.2%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |51.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/import_test >> ydb-tests-datashard-ttl::import_test [GOOD] |51.2%| 
[LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |51.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> SystemView::ShowCreateTablePartitionPolicyIndexTable >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] |51.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TKeyValueTracingTest::WriteSmall [GOOD] |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |51.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |51.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] |51.3%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |51.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |51.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |51.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [GOOD] |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |51.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat >> KqpScheme::CreateTableWithCompactionPolicyUncompat |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] |51.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] |51.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |51.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-10-24T16:21:31.881420Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/000cac/r3tmp/tmpj5V1mZ//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-10-24T16:21:31.882037Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq >> KqpScheme::CreateTableWithCompactionPolicyUncompat [GOOD] >> KqpScheme::CreateTableWithCompactionPolicyCompat >> SystemView::ShowCreateTableColumnAlterObject [GOOD] >> KqpOlapJson::FilterVariants[2,true,1024,1000,1000000,0] >> KqpOlapJson::EmptyVariants[1,true,1,10,0,0] >> KqpOlapJson::SimpleVariants[2,false,1,10,0,0] >> KqpScheme::CreateAndAlterTableWithBloomFilterCompat [GOOD] >> KqpOlapAggregations::Aggregation_Avg_Null >> SystemView::ShowCreateTableFamilyParameters |50.7%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |50.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> KqpScheme::CreateAndAlterTableComplex |50.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |50.5%| [TS] {RESULT} ydb/tests/stress/kafka/tests/flake8 >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,0,100,0] >> KqpScheme::CreateTableWithCompactionPolicyCompat [GOOD] >> KqpScheme::CreateAndAlterTableComplex [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,10,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,0,0,0] >> KqpScheme::CreateTableVectorIndexInvalidSettingsPublicApi |50.5%| [TS] {RESULT} ydb/tests/functional/limits/import_test |50.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> KqpScheme::CreateAlterUserWithHash >> KqpOlapJson::SimpleVariants[2,false,1,10,100,0] |50.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |50.5%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |50.5%| [TS] {RESULT} ydb/tests/olap/common/flake8 |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style >> KqpOlapAggregations::Aggregation_Avg_Null [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMix >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-COLUMNSHARD-YDB_VALUE >> KqpOlapJson::FilterVariants[2,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1024,1000,1000000,0.5] |50.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test >> KqpOlapJson::EmptyVariants[1,true,0,0,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,0,1000000,0] >> KqpOlapJson::SimpleVariants[2,false,1,10,100,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,10,100,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Transactions_Conflict_On_SeqNo_Query [GOOD] >> KqpScheme::CreateTableVectorIndexInvalidSettingsPublicApi [GOOD] Test command err: 2025-10-24T16:20:41.158646Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824002012473937:2212];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:41.158686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:41.164959Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct 
read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0013d8/r3tmp/tmpsPxzED/pdisk_1.dat 2025-10-24T16:20:41.243988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:41.247794Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1635, node 1 2025-10-24T16:20:41.259774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/0013d8/r3tmp/yandexlyisbT.tmp 2025-10-24T16:20:41.259787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/0013d8/r3tmp/yandexlyisbT.tmp 2025-10-24T16:20:41.259871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/0013d8/r3tmp/yandexlyisbT.tmp 2025-10-24T16:20:41.259931Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:41.268756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:41.268795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:41.268818Z INFO: TTestServer started on Port 3463 GrpcPort 1635 2025-10-24T16:20:41.269778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3463 PQClient connected to localhost:1635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:41.298718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-10-24T16:20:41.307293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 2025-10-24T16:20:41.538617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:41.670471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824002012474569:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.670499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.670611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824002012474596:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.670622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824002012474597:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.670642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.671341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824002012474629:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.671352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.671377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:41.671382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824002012474631:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.671387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.673087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824002012474600:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:41.711978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.719315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.733131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7564824002012474937:2615] 2025-10-24T16:20:41.770757Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824002012474928:2609] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:20:42.159410Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:46.158642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824002012473937:2212];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:46.158675Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:46.958639Z :Transactions_Conflict_On_SeqNo_Table INFO: TTopicSdkTestSetup started 2025-10-24T16:20:46.961541Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:46.966102Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824023487311671:2722] connected; active server actors: 1 2025-10-24T16:20:46.966220Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:46.966347Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:46.966379Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-10-24T16:20:46.968117Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:20:46.968285Z node 1 :PERSQUEUE DEBU ... 2025-10-24T16:21:22.029782Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:22.029784Z node 2 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Batch completed (2) 2025-10-24T16:21:22.029785Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 16 messageNo: 5 requestId: cookie: 646 2025-10-24T16:21:22.029790Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037896][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:21:22.029795Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Topic 'topic_A' partition {15, {2, 281474976715753}, 100059} part blob processing sourceId '\0test-message_group_id_15' seqNo 626 partNo 0 2025-10-24T16:21:22.029798Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037896][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100059 offset 1 partNo 0 count 9 size 5746 2025-10-24T16:21:22.029812Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Topic 'topic_A' partition {15, {2, 281474976715753}, 100059} part blob complete sourceId '\0test-message_group_id_15' seqNo 626 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 358 count 1 nextOffset 6 batches 1 2025-10-24T16:21:22.029817Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Topic 'topic_A' partition {15, {2, 281474976715753}, 100059} part blob processing sourceId '\0test-message_group_id_15' seqNo 627 partNo 0 2025-10-24T16:21:22.029821Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Topic 'topic_A' partition {15, {2, 281474976715753}, 100059} part blob complete sourceId '\0test-message_group_id_15' seqNo 627 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 952 count 2 nextOffset 7 batches 1 2025-10-24T16:21:22.029824Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Try persist 2025-10-24T16:21:22.029850Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1675: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] Add new write blob: topic 'topic_A' partition {15, {2, 281474976715753}, 100059} compactOffset 5,2 HeadOffset 5 endOffset 5 curOffset 7 D0000100059_00000000000000000005_00000_0000000002_00000? 
size 931 WTime 1761322882029 2025-10-24T16:21:22.029863Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037912][Partition][{15, {2, 281474976715753}, 100059}][StateIdle] No data for blobs compaction 2025-10-24T16:21:22.029869Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 15 messageNo: 6 requestId: cookie: 622 2025-10-24T16:21:22.029869Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. Partition 100059 offset 1 count 8 size 5355 actorID [2:7564824094125174881:2487] 2025-10-24T16:21:22.029875Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 15 messageNo: 7 requestId: cookie: 623 2025-10-24T16:21:22.029879Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 15 messageNo: 8 requestId: cookie: 624 2025-10-24T16:21:22.029880Z node 2 :PERSQUEUE DEBUG: partition.cpp:2168: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Received TEvKeyValue::TEvResponse 2025-10-24T16:21:22.029881Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037907' partition 100059 offset 1 partno 0 count 8 parts 0 suffix '63' size 5355 2025-10-24T16:21:22.029882Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 15 messageNo: 9 requestId: cookie: 625 2025-10-24T16:21:22.029886Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037912][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:21:22.029886Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 5364 WriteNewSizeFromSupportivePartitions# 0 2025-10-24T16:21:22.029892Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037912][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100059 offset 5 partNo 0 count 2 size 931 2025-10-24T16:21:22.029892Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029897Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 566, partNo: 0, Offset: 1 is stored on disk 2025-10-24T16:21:22.029900Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029904Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 567, partNo: 0, Offset: 2 is stored on disk 2025-10-24T16:21:22.029906Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. 
Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029910Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 568, partNo: 0, Offset: 3 is stored on disk 2025-10-24T16:21:22.029912Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029916Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 569, partNo: 0, Offset: 4 is stored on disk 2025-10-24T16:21:22.029919Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029923Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 570, partNo: 0, Offset: 5 is stored on disk 2025-10-24T16:21:22.029927Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029931Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 571, partNo: 0, Offset: 6 is stored on disk 2025-10-24T16:21:22.029934Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029938Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 572, partNo: 0, Offset: 7 is stored on disk 2025-10-24T16:21:22.029940Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] TPartition::ReplyWrite. 
Partition: {18, {2, 281474976715753}, 100059} 2025-10-24T16:21:22.029944Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Answering for message sourceid: '\0test-message_group_id_18', Topic: 'topic_A', Partition: {18, {2, 281474976715753}, 100059}, SeqNo: 573, partNo: 0, Offset: 8 is stored on disk 2025-10-24T16:21:22.029956Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:22.029958Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:22.029960Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] Try persist 2025-10-24T16:21:22.029966Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][{18, {2, 281474976715753}, 100059}][StateIdle] No data for blobs compaction 2025-10-24T16:21:22.029970Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 6 requestId: cookie: 566 2025-10-24T16:21:22.029976Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 7 requestId: cookie: 567 2025-10-24T16:21:22.029979Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 8 requestId: cookie: 568 2025-10-24T16:21:22.029983Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 9 requestId: cookie: 569 2025-10-24T16:21:22.029987Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 14 requestId: cookie: 570 2025-10-24T16:21:22.029991Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 15 requestId: cookie: 571 2025-10-24T16:21:22.029991Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic_A' requestId: 2025-10-24T16:21:22.029994Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037902] got client message batch for topic 'topic_A' partition 19 2025-10-24T16:21:22.029994Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 16 requestId: cookie: 572 2025-10-24T16:21:22.029998Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 18 messageNo: 17 requestId: cookie: 573 2025-10-24T16:21:22.030003Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2011: [PQ: 72075186224037902] got client message topic: topic_A partition: 19 SourceId: '\0test-message_group_id_19' SeqNo: 595 partNo : 0 messageNo: 15 size 707 offset: -1 2025-10-24T16:21:22.030005Z node 2 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037907 (partition=18) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-10-24T16:21:22.030008Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic_A' requestId: >> KqpScheme::CreateTableAlterTableVectorIndexInvalidSettings+CreateTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMix [GOOD] >> KqpScheme::CreateAlterUserWithHash [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:34.730794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:34.730818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:34.730823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:34.730831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:34.730838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:34.730842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:34.730856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:34.730870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:34.730974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:34.731029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:34.741693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:34.741716Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:34.744510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:34.744540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:34.744583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:34.745890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:34.746055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:34.746152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.746217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:34.747019Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:34.747053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:34.747230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:34.747237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:34.747254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:34.747260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:34.747264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:34.747282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.748408Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:34.763199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:34.763306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.763376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:34.763386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:34.763457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:34.763473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:34.764273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.764311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:34.764383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.764394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:34.764400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:34.764405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:34.764803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.764816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:34.764821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:34.765145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.765152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.765156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:34.765161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:34.765582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:34.765978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:34.766027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:34.766200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.766218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-10-24T16:20:34.766224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:34.766274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:34.766279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:34.766302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:34.766310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:34.766670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:34.766677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1588 Memory: 124168 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-10-24T16:21:32.592599Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:21:32.592612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.1588 2025-10-24T16:21:32.592624Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:21:32.592629Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-10-24T16:21:32.633399Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:32.633433Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:32.633440Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-10-24T16:21:32.633468Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:609: Will execute TTxStoreStats, queue# 1 2025-10-24T16:21:32.633473Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-10-24T16:21:32.633504Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-10-24T16:21:32.633520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-10-24T16:21:32.633525Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-10-24T16:21:32.633572Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-10-24T16:21:32.633608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2025-10-24T16:21:32.633617Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:227: [BackgroundCompaction] [Update] Enqueued shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-10-24T16:21:32.633644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:469: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-10-24T16:21:32.633672Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:32.633799Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553210, Sender [3:140:2161], Recipient [3:319:2304]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-10-24T16:21:32.633841Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:140:2161], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-10-24T16:21:32.634146Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-10-24T16:21:32.634154Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 0, front# 1 2025-10-24T16:21:32.635690Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 268828683, Sender [3:309:2296], Recipient [3:319:2304]: 
NKikimr::TEvTablet::TEvFollowerGcApplied 2025-10-24T16:21:32.636524Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.152000Z 2025-10-24T16:21:32.636548Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 1 2025-10-24T16:21:32.636558Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:140:2161]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:21:32.636648Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553211, Sender [3:319:2304], Recipient [3:140:2161]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-10-24T16:21:32.636658Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-10-24T16:21:32.636691Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:30.000000Z}, next wakeup in# 29.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-10-24T16:21:32.636710Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 29.996000s, rate# 1, in queue# 0 shards, waiting after compaction# 0 shards, running# 1 shards at schemeshard 72057594046678944 2025-10-24T16:21:32.636823Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553210, Sender [3:140:2161], Recipient [3:319:2304]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-10-24T16:21:32.636854Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:140:2161], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-10-24T16:21:32.637101Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.152000Z 2025-10-24T16:21:32.637107Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 2 2025-10-24T16:21:32.637866Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 268828683, Sender [3:309:2296], Recipient [3:319:2304]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-10-24T16:21:32.637893Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:30.152000Z 2025-10-24T16:21:32.638652Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 268828683, Sender [3:309:2296], Recipient [3:319:2304]: 
NKikimr::TEvTablet::TEvFollowerGcApplied 2025-10-24T16:21:32.638716Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 2, ts 1970-01-01T00:00:30.156000Z 2025-10-24T16:21:32.638723Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 2, front# 2 2025-10-24T16:21:32.638730Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:140:2161]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:21:32.638779Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553211, Sender [3:319:2304], Recipient [3:140:2161]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-10-24T16:21:32.638785Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5306: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-10-24T16:21:32.638800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2025-10-24T16:21:32.638810Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.992000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-10-24T16:21:32.639271Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 268828683, Sender [3:309:2296], Recipient [3:319:2304]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-10-24T16:21:32.649496Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:32.649522Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:32.649527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:21:32.660722Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:30.156000Z >> KqpOlapAggregations::Aggregation_Avg_GroupBy >> KqpScheme::CreateAlterUserLoginNoLogin >> SystemView::ShowCreateTablePartitionPolicyIndexTable [GOOD] >> SystemView::StoragePoolsFields >> KqpOlapJson::EmptyVariants[1,true,1,10,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,10,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,0,0] [GOOD] |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,100,0] >> KqpOlapJson::FilterVariants[2,true,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,10,1000000,0] >> KqpOlapJson::OrFilterVariants[1,false,0,0,0,0] >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,0,1000000,0.5] >> KqpOlapJson::EmptyVariants[1,true,0,0,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,0,0,0.5] |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics >> KqpOlapJson::OrFilterVariants[1,false,0,0,0,0] [GOOD] |50.0%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/flake8 |50.0%| [TS] {RESULT} ydb/tests/datashard/vector_index/medium/import_test >> KqpOlapJson::OrFilterVariants[1,false,0,0,0,0.5] >> KqpScheme::CreateAlterUserLoginNoLogin [GOOD] >> KqpScheme::ChangefeedTopicPartitions >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,0,0] >> KqpScheme::CreateTableAlterTableVectorIndexInvalidSettings+CreateTable [GOOD] >> KqpScheme::CreateTableAlterTableVectorIndexInvalidSettings-CreateTable >> KqpOlapJson::SimpleVariants[2,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,10,1000000,0.5] |50.0%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,0,0] >> KqpOlapJson::EmptyVariants[1,true,0,0,100,0] [GOOD] |50.0%| [TS] {RESULT} ydb/tests/library/ut/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/s3/flake8 >> KqpOlapJson::EmptyVariants[1,true,0,0,1000000,0] |50.0%| [TS] {RESULT} ydb/tests/functional/security/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |50.0%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test |50.0%| [TS] {RESULT} ydb/tests/functional/hive/flake8 >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> KqpOlapJson::OrFilterVariants[1,false,0,0,0,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,0,0,100,0] |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 >> KqpOlapJson::EmptyVariants[1,true,1,10,0,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,10,100,0] >> KqpOlapJson::SimpleVariants[2,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,0,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,1000,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,100,0.5] >> SystemView::ShowCreateTableFamilyParameters [GOOD] >> SystemView::QueryStatsRetries >> KqpScheme::ChangefeedTopicPartitions [GOOD] >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> KqpScheme::CreateAlterDropTableStore >> KqpOlapJson::EmptyVariants[1,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,0,100,0.5] >> IndexBuildTestReboots::DropIndexUniq [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> SystemView::DescribeAccessDenied [GOOD] Test command err: 2025-10-24T16:20:44.102887Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824015360133957:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:44.103070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0015d0/r3tmp/tmpcmZmJO/pdisk_1.dat 2025-10-24T16:20:44.133198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:44.141085Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1487, node 1 2025-10-24T16:20:44.157954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:44.157964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:44.157965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:44.158009Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:44.204496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:44.204529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:44.209588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:20:44.214730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) waiting... 2025-10-24T16:20:44.219569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:44.219597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:44.220522Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-10-24T16:20:44.220761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:44.220777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:44.221459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:44.222145Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:20:44.240112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:44.240215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:44.240233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:44.242525Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant1/.metadata/script_executions 2025-10-24T16:20:44.242932Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant1/.metadata/script_executions 2025-10-24T16:20:44.279763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) waiting... 
2025-10-24T16:20:44.289237Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7564824013234175877:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:44.289339Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:44.289740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:44.289759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:44.290651Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-10-24T16:20:44.290802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:44.290820Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824013613695495:2162];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:44.290843Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:44.291098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:44.291114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:44.291503Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant2/.metadata/script_executions 2025-10-24T16:20:44.291883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:44.292090Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:20:44.292239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-10-24T16:20:44.292373Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant2/.metadata/script_executions 2025-10-24T16:20:44.375059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:44.392945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824015360135211:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:20:44.392945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824015360135200:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:20:44.392955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:20:44.393009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824015360135215:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:20:44.393017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:44.393611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard ... nsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) waiting... 2025-10-24T16:21:12.294171Z node 44 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[44:7564824134585266885:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:12.294251Z node 44 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:12.299705Z node 45 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant1/.metadata/script_executions 2025-10-24T16:21:12.298930Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:21:12.310244Z node 43 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant2/.metadata/script_executions 2025-10-24T16:21:12.310650Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:12.310675Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:12.313866Z node 42 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 43 Cookie 43 2025-10-24T16:21:12.314271Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:12.315204Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:12.315223Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:12.315741Z node 42 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 44 Cookie 44 2025-10-24T16:21:12.315759Z node 44 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant2/.metadata/script_executions 2025-10-24T16:21:12.316962Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:12.668666Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:12.756899Z node 42 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [42:7564824134414031855:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:12.756920Z node 42 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:12.757001Z node 42 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [42:7564824134414031867:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:12.757009Z node 42 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [42:7564824134414031868:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:12.757034Z node 42 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:12.757940Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:12.779266Z node 42 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [42:7564824134414031871:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2025-10-24T16:21:12.853776Z node 42 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [42:7564824134414031945:2943] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:12.872153Z node 42 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgcbxm6f6ypb2h5h7mpnhw, Database: , SessionId: ydb://session/3?node_id=42&id=ZmJkM2ViOGQtNjkzOGQ5Ny1jZGFhNTY4MS04MTI4ZDNhNA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:12.932176Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:12.973835Z node 42 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:13.050102Z node 42 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k8bgcc63671hm6f5zrdsek1s, Database: , SessionId: ydb://session/3?node_id=42&id=ZmJkM2ViOGQtNjkzOGQ5Ny1jZGFhNTY4MS04MTI4ZDNhNA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:13.061852Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:13.128381Z node 42 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715669. Ctx: { TraceId: 01k8bgcc8ndm0yrkay7dxdxz2n, Database: , SessionId: ydb://session/3?node_id=42&id=ZmJkM2ViOGQtNjkzOGQ5Ny1jZGFhNTY4MS04MTI4ZDNhNA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:13.160181Z node 42 :TX_PROXY ERROR: describe.cpp:393: Access denied for user0@builtin with access DescribeSchema to path Root
: Error: Access denied
2025-10-24T16:21:13.162811Z node 42 :TX_PROXY ERROR: describe.cpp:393: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
: Error: Access denied
2025-10-24T16:21:13.164323Z node 42 :TX_PROXY ERROR: describe.cpp:393: Access denied for user0@builtin with access DescribeSchema to path Root/.sys
: Error: Access denied
2025-10-24T16:21:13.166497Z node 42 :TX_PROXY ERROR: describe.cpp:393: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
: Error: Access denied
2025-10-24T16:21:13.167822Z node 42 :TX_PROXY ERROR: describe.cpp:393: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
: Error: Access denied
2025-10-24T16:21:13.169604Z node 42 :TX_PROXY ERROR: describe.cpp:393: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
: Error: Access denied 2025-10-24T16:21:13.205892Z node 42 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 45 2025-10-24T16:21:13.206051Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.206080Z node 42 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 46 2025-10-24T16:21:13.206120Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.207471Z node 42 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 43 2025-10-24T16:21:13.207581Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.208512Z node 42 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 44 2025-10-24T16:21:13.208615Z node 42 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.213425Z node 42 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[44:7564824134585267031:2112], Type=268959746 2025-10-24T16:21:13.213441Z node 42 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[44:7564824134585267031:2112], Type=268959746 2025-10-24T16:21:13.213445Z node 42 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[44:7564824134585267031:2112], Type=268959746 2025-10-24T16:21:13.213449Z node 42 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[44:7564824134585267031:2112], Type=268959746 2025-10-24T16:21:13.227560Z node 46 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:13.228344Z node 45 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:13.298245Z node 44 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:13.299126Z node 43 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |49.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> KqpOlapJson::OrFilterVariants[1,false,0,0,100,0] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,0,0,100,0.5] >> KqpOlapJson::SimpleVariants[2,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,100,0] >> KqpScheme::CreateAlterDropTableStore [GOOD] >> KqpScheme::CreateTableAlterTableVectorIndexInvalidSettings-CreateTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:37.368094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:37.368116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:37.368120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:37.368133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:37.368138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:37.368142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:37.368165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:37.368179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:37.368314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:37.368390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:37.383389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:37.383414Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:37.387378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:37.387418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:37.387450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:37.389068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:37.389335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:37.389438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:37.389531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:37.390876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:37.390931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:37.391400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:37.391413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:37.391455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:37.391465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:37.391470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:37.391495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.392845Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:37.412707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:37.412795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.412851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:37.412857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:37.412998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:37.413010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:37.413860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:37.413901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-10-24T16:21:37.413956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.413964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:37.413968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:37.413973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:37.414311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.414319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:37.414325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:37.414590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.414597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.414601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:37.414606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:37.415052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:37.415412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:37.415451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:37.415677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:37.415703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:37.415709Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:37.415783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:37.415791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:37.415822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:37.415835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:37.416347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:37.416356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 2025-10-24T16:21:37.523504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:37.523658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 
2025-10-24T16:21:37.525252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:37.525315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:37.525959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435083, Sender [1:570:2498], Recipient [1:570:2498]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:37.525976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5293: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:37.526105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:37.526118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:37.526129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:37.526140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:37.526146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:37.526150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:37.526205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274399233, Sender [1:606:2498], Recipient [1:570:2498]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:37.526214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5401: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:37.526219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:570:2498] sender: [1:627:2058] recipient: [1:15:2062] 2025-10-24T16:21:37.587673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:626:2541], Recipient [1:570:2498]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:21:37.587690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:37.587718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:37.587776Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 51us result status StatusSuccess 2025-10-24T16:21:37.587920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:37.588036Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271188001, Sender [1:628:2542], Recipient [1:570:2498]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-10-24T16:21:37.588042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5268: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-10-24T16:21:37.588048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-10-24T16:21:37.588053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2025-10-24T16:21:37.588061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:141: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-10-24T16:21:37.588074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:629:2543], Recipient [1:570:2498]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:21:37.588078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:37.588101Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:37.588113Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 13us result status StatusSuccess 2025-10-24T16:21:37.588156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpOlapAggregations::Aggregation_Avg_GroupBy [GOOD] |49.6%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScheme::CreateTableVectorIndexInvalidSettingsPositions >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,0,0] [GOOD] >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> KqpOlapJson::EmptyVariants[1,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,0,1000000,0.5] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,100,0] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-COLUMNSHARD-CSV >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,0,0,100,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> AuthSystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-10-24T16:20:45.333992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824019671643081:2079];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:45.334019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00164b/r3tmp/tmpRqKxk4/pdisk_1.dat 2025-10-24T16:20:45.371477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:45.384674Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23295, node 1 2025-10-24T16:20:45.397348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:45.397359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:45.397366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:45.397399Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:45.435641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:45.435684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:45.441055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:45.453443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-10-24T16:20:45.453493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-10-24T16:20:45.453512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-10-24T16:20:45.453554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-10-24T16:20:45.453564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976715658:0 type: TxCreateExtSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 2] source path: 2025-10-24T16:20:45.453584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-10-24T16:20:45.453604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:20:45.453613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:20:45.453624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-10-24T16:20:45.453629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-10-24T16:20:45.454059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-10-24T16:20:45.454092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-10-24T16:20:45.454119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-10-24T16:20:45.454126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-10-24T16:20:45.454147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:20:45.454159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-10-24T16:20:45.454164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7564824019671643653:2387], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-10-24T16:20:45.454166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7564824019671643653:2387], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-10-24T16:20:45.454171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-10-24T16:20:45.454174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-10-24T16:20:45.454178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715658:0, at tablet# 72057594046644480 2025-10-24T16:20:45.454181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 2025-10-24T16:20:45.454623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } 
ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 waiting... 2025-10-24T16:20:45.455086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.455105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.455106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-10-24T16:20:45.455108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-10-24T16:20:45.455111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-10-24T16:20:45.455140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.455149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-10-24T16:20:45.455150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-10-24T16:20:45.455151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-10-24T16:20:45.455151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-10-24T16:20:45.455155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-10-24T16:20:45.455354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:20:45.455361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-10-24T16:20:45.455364Z node ... 
rivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-10-24T16:21:13.663582Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824138839890819:2403], row count: 0, finished: 0 2025-10-24T16:21:13.663590Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:13.666542Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-10-24T16:21:13.666562Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824138839890819:2403], row count: 0, finished: 0 2025-10-24T16:21:13.666571Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:13.666777Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-10-24T16:21:13.666793Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824138839890819:2403], row count: 2, finished: 0 2025-10-24T16:21:13.666813Z node 51 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [51:7564824138839890819:2403], owner: [51:7564824138839890815:2401], scan id: 0, sys view info: Type: EAuthPermissions SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:13.667453Z node 51 :SYSTEM_VIEWS TRACE: 
sysview_service.cpp:901: Collect query stats: service id# [51:7564824134544921530:2079], database# /Root, query hash# 3241175529381461838, cpu time# 28645 2025-10-24T16:21:13.667663Z node 51 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322873660, txId: 281474976715687] shutting down 2025-10-24T16:21:13.700702Z node 51 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [51:7564824138839890854:2412], owner: [51:7564824138839890850:2410], scan id: 0, sys view info: Type: EAuthPermissions SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:13.704303Z node 51 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [51:7564824138839890854:2412], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:13.704321Z node 51 :SYSTEM_VIEWS DEBUG: auth_scan_base.h:100: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-10-24T16:21:13.704345Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:13.708322Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-10-24T16:21:13.708346Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824138839890854:2412], row count: 0, finished: 0 2025-10-24T16:21:13.708360Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:13.709796Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } 
Children [SubDir1,SubDir2] }] } 2025-10-24T16:21:13.709820Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824138839890854:2412], row count: 0, finished: 0 2025-10-24T16:21:13.710029Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:217: Navigate { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:21:13.710188Z node 51 :SYSTEM_VIEWS TRACE: auth_scan_base.h:176: Got navigate: { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-10-24T16:21:13.710232Z node 51 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [51:7564824138839890854:2412], row count: 1, finished: 0 2025-10-24T16:21:13.710261Z node 51 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [51:7564824138839890854:2412], owner: [51:7564824138839890850:2410], scan id: 0, sys view info: Type: EAuthPermissions SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:13.711361Z node 51 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322873694, txId: 281474976715689] shutting down 2025-10-24T16:21:13.711537Z node 51 :SYSTEM_VIEWS TRACE: sysview_service.cpp:901: Collect query stats: service id# [51:7564824134544921530:2079], database# /Root, query hash# 10048541180858699862, cpu time# 35134 2025-10-24T16:21:13.716916Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:13.716570Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 54 2025-10-24T16:21:13.716803Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(54, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.717207Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 53 2025-10-24T16:21:13.717285Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(53, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.717303Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 55 2025-10-24T16:21:13.717383Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(55, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.717659Z node 51 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 52 2025-10-24T16:21:13.717748Z node 51 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(52, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:13.716109Z 
node 53 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:13.725319Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[55:7564824137105016875:2112], Type=268959746 2025-10-24T16:21:13.725337Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[55:7564824137105016875:2112], Type=268959746 2025-10-24T16:21:13.725342Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[55:7564824137105016875:2112], Type=268959746 2025-10-24T16:21:13.725346Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[55:7564824137105016875:2112], Type=268959746 2025-10-24T16:21:13.725382Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[52:7564824133271959338:2105], Type=268959746 2025-10-24T16:21:13.725386Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[52:7564824133271959338:2105], Type=268959746 2025-10-24T16:21:13.725389Z node 51 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[52:7564824133271959338:2105], Type=268959746 |49.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> KqpOlapJson::EmptyVariants[1,true,1,10,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,1000000,0] >> KqpOlapJson::OrFilterVariants[1,false,0,0,1000000,0.5] |49.6%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |49.6%| [TS] {RESULT} ydb/tests/stress/reconfig_state_storage_workload/tests/flake8 |49.6%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |49.6%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test >> KqpOlapJson::EmptyVariants[1,true,1,10,100,0.5] >> SystemView::QueryStatsRetries [GOOD] >> KqpScheme::CreateAlterDropColumnTableInStore [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize |49.6%| [TS] {RESULT} ydb/tests/example/import_test |49.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |49.6%| COMPACTING CACHE 14.0GiB |49.6%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |49.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> KqpScheme::CleanupTemporaryTables >> KqpOlapJson::EmptyVariants[1,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,100,0] >> KqpScheme::CreateTableVectorIndexInvalidSettingsPositions [GOOD] |49.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |49.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |49.7%| [TS] {RESULT} ydb/tests/functional/serverless/import_test |49.7%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/import_test >> KqpScheme::CreateTableStoreNegative >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,100,0.5] [GOOD] |49.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so >> KqpOlapJson::OrFilterVariants[1,false,0,0,1000000,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::DropIndexUniq [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.773054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.773070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.773074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.773080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.773085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.773088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.773094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.773105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.773191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.773230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.787602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.787624Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.787694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.789874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.789942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.789974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.791954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.792056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.792151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.792229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.792900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.792924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.793065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.793071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.793089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.793095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.793099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.793114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.793987Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.809059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.809127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.809163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.809169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.809202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.809226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.809823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.809857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.809906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.809913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.809916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.809920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.810289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.810299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.810303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.810659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.810671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.810675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.810680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.811128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2025-10-24T16:20:33.811478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.811506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.811652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.811671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... HEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1003:2 2025-10-24T16:21:37.868696Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:21:37.868861Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:37.869164Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:37.869179Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:37.869183Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:37.869648Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:21:37.870546Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5943: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 357 RawX2: 936302872867 } TabletId: 72075186233409546 State: 4 2025-10-24T16:21:37.870571Z node 218 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-10-24T16:21:37.871026Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:21:37.871047Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:21:37.871133Z node 218 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 
TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:21:37.871214Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:21:37.871278Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-10-24T16:21:37.872235Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:21:37.872249Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-10-24T16:21:37.872263Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:21:37.872269Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-10-24T16:21:37.872277Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:21:37.872952Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:21:37.872967Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-10-24T16:21:37.873055Z node 218 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-10-24T16:21:37.873111Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-10-24T16:21:37.873117Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-10-24T16:21:37.873171Z node 218 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:21:37.873189Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:21:37.873194Z node 218 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [218:591:2551] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:21:37.873262Z node 218 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2025-10-24T16:21:37.873297Z node 218 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 44us result status StatusSuccess 2025-10-24T16:21:37.873413Z node 218 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:37.873493Z node 218 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:37.873516Z node 218 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0" took 25us result status StatusPathDoesNotExist 
2025-10-24T16:21:37.873538Z node 218 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table/UserDefinedIndexByValue0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:21:37.873587Z node 218 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:37.873601Z node 218 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" took 16us result status StatusPathDoesNotExist 2025-10-24T16:21:37.873619Z node 218 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table/UserDefinedIndexByValue0/indexImplTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |49.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest |49.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest |49.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 >> KqpScheme::CreateTableStoreNegative [GOOD] >> KqpScheme::CreateStreamingQueryErrors >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,100,0] [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> KqpOlapJson::EmptyVariants[1,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,10,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleVariants[2,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 17010, MsgBus: 10897 2025-10-24T16:21:33.772939Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824225382338182:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:33.772972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f77/r3tmp/tmpZdF2UK/pdisk_1.dat 2025-10-24T16:21:33.811902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:33.812591Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:33.812784Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824225382338076:2081] 1761322893772387 != 1761322893772390 TServer::EnableGrpc on GrpcPort 17010, node 1 2025-10-24T16:21:33.825257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:33.825269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:33.825271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:33.825313Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10897 TClient is connected to server localhost:10897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:33.875631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:33.875668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:33.876735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:33.886826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:34.010449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:34.137179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824229677306052:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.137203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.137253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824229677306062:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.137267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.169151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:34.178803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.178838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.178861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.178882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.178899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.178914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.178931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.178947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.178965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.178980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.178996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.179016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:34.179031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677306124:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:34.179293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677306123:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.179331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677306123:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.179381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677306123:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.179386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:34.179392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:34.179400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:34.179403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:34.179406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677306123:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.179414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:34.179421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Cl ... 
80;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322897854;max=18446744073709551615;plan=0;src=[6:7564824242487176098:2148];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.854956Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824242487176484:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=56165187352528;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322897854;max=18446744073709551615;plan=0;src=[6:7564824242487176098:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.856589Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.856605Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.856608Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.856625Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.856635Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:37.856637Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:37.865425Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176586:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.865448Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.865492Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176588:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.865498Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.867402Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:37.869364Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:37.869364Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:37.873347Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176622:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.873364Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.873433Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176624:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.873444Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.875984Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:37.879762Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:37.879789Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:37.883822Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176659:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.883865Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176664:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.883871Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.883919Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824242487176667:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.883932Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.884491Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:37.887800Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824242487176666:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:37.941004Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824242487176719:2465] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,100,0.5] >> KqpOlapJson::EmptyVariants[1,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,10,1000000,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,0,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,1000000,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::OrFilterVariants[1,false,0,0,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 31096, MsgBus: 17629 2025-10-24T16:21:33.914918Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824225382250306:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:33.914934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00127a/r3tmp/tmpEsF2Z5/pdisk_1.dat 2025-10-24T16:21:33.964317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:33.967487Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31096, node 1 2025-10-24T16:21:33.981059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:33.981068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:33.981069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:33.981164Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17629 2025-10-24T16:21:34.019053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:34.019079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:34.020124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:17629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:34.047366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:34.141569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:34.303415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824229677218248:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.303445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.303565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824229677218258:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.303579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.355742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:34.374128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.374147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.374185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.374199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.374253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.374254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.374302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.374305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.374322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.374323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.374344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.374350Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.374381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.374389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.374432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.374439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.374448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.374455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.374462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.374476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.374477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229677218320:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.374492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824229677218319:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.374502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622 ... 
s=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:39.503594Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:39.503600Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:39.503609Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:39.503613Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:39.503617Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:39.503626Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:39.503638Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:39.503650Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:39.503652Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:39.505101Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824249717383889:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=53365974588560;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322899504;max=18446744073709551615;plan=0;src=[7:7564824245422416231:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:39.506697Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:39.506719Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:39.506721Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET 
(ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:39.512545Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717383961:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.512568Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.512604Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717383963:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.512614Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.515356Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:39.517473Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:39.521589Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717383993:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.521606Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.521643Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717383995:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.521653Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.523645Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:39.532140Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:39.536581Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717384026:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.536608Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.536623Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717384031:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.536637Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824249717384033:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.536648Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.537293Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:39.546590Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824249717384035:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:21:39.613720Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824249717384086:2437] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE (JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3") OR (JSON_VALUE(Col2, "$.b") = "b1" AND JSON_VALUE(Col2, "$.c") = "c1") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:4/0/0 HEADER:0/0/0 >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] |49.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1749, MsgBus: 26713 2025-10-24T16:21:34.364136Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824228163084960:2073];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:34.364167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001387/r3tmp/tmpj71zAO/pdisk_1.dat 2025-10-24T16:21:34.404174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:34.415416Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:34.415661Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824228163084917:2081] 1761322894363649 != 1761322894363652 TServer::EnableGrpc on GrpcPort 1749, node 1 2025-10-24T16:21:34.467141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:34.467198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:34.468471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:34.484363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:34.484376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:34.484378Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:34.484413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26713 TClient is connected to server localhost:26713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:34.545844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:34.683293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:34.741710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824228163085592:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.741743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.741791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824228163085602:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.741802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.784554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:34.794709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.794835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.794870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.794898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.794911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.794928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.794957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.794973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.794995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.795011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.795032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.795049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:34.795061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824228163085655:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:34.795612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:34.795625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:34.795639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:34.795648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:34.795668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:34.795676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:34.795687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:34.795696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:34.795704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:34.795711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:34.795715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622403788 ... 
t_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:39.310636Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:39.310643Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:39.310652Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:39.310659Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:39.310669Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:39.310685Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:39.310694Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:39.310709Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:39.310718Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:39.312281Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824250663644576:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=20464391594432;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322899312;max=18446744073709551615;plan=0;src=[7:7564824246368676913:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:39.313849Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:39.313870Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:39.313874Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` 
(TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:39.319103Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644648:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.319124Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.319164Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644650:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.319174Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.321594Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:39.323346Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:39.327295Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644680:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.327314Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.327370Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644682:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.327380Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.330051Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:39.336056Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:21:39.340473Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644713:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.340491Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.340507Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644718:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.340516Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824250663644720:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.340523Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.341133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:39.342986Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824250663644722:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:21:39.405561Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824250663644773:2437] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> KqpOlapJson::EmptyVariants[1,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,100,0.5] |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> SystemView::TopPartitionsByTliFields [GOOD] |49.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |49.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> ViewQuerySplit::Basic [GOOD] >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> ViewQuerySplit::WithPragmaTablePathPrefix [GOOD] >> SystemView::StoragePoolsFields [GOOD] >> ViewQuerySplit::WithPairedPragmaTablePathPrefix [GOOD] >> ViewQuerySplit::WithComments [GOOD] >> ViewQuerySplit::Joins [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,0,0] |49.8%| [TS] {RESULT} ydb/tests/stress/topic_kafka/tests/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:38.821829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:38.821853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-10-24T16:21:38.821860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:38.821875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:38.821882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:38.821887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:38.821903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:38.821916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:38.822043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:38.822108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:38.836309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:38.836330Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:38.840677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:38.840705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:38.840823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:38.842164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:38.842311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:38.842392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:38.842469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:38.843286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:38.843320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:38.843641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:38.843648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:38.843676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-10-24T16:21:38.843681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:38.843685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:38.843702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.844906Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:38.866570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:38.866651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.866707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:38.866713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:38.866786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:38.866796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:38.867576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:38.867628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:38.867694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.867704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:38.867707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:38.867711Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:38.868141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.868154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:38.868160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:38.868540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.868552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.868557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:38.868563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:38.869006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:38.869373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:38.869417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:38.869595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:38.869613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:38.869619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:38.869678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:38.869683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:38.869708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:38.869716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:38.870109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:38.870118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:39.062052Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186233409547][Topic1] pipe [1:576:2494] connected; active server actors: 1 2025-10-24T16:21:39.151440Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:39.151525Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 114us result status StatusSuccess 2025-10-24T16:21:39.151659Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 
PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:39.936694Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:157: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-10-24T16:21:39.936730Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 1 2025-10-24T16:21:39.936938Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:563: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 0 2025-10-24T16:21:39.937066Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1849: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-10-24T16:21:39.937358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-10-24T16:21:39.937603Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:404: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-10-24T16:21:39.948445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:21:40.459164Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:157: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-10-24T16:21:40.459207Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-10-24T16:21:40.459483Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:563: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-10-24T16:21:40.459512Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1849: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-10-24T16:21:40.459542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-10-24T16:21:40.470979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:21:40.969453Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:157: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-10-24T16:21:40.969491Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-10-24T16:21:40.969742Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:563: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-10-24T16:21:40.969785Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1849: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-10-24T16:21:40.969836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-10-24T16:21:40.981047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:21:41.042187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:41.042281Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 118us result status StatusSuccess 2025-10-24T16:21:41.042431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:41.042570Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186233409547][Topic1] pipe [1:653:2562] connected; active server actors: 1 2025-10-24T16:21:41.046179Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: 
(0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-10-24T16:21:41.046265Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-10-24T16:21:41.046832Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:404: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-10-24T16:21:41.078111Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186233409547][Topic1] pipe [1:697:2596] connected; active server actors: 1 |49.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> KqpOlapJson::CompactionVariants[2,false,1,0,0,0] >> KqpOlapAggregations::Aggregation_Avg_NullGroupBy [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy >> KqpScheme::CreateStreamingQueryErrors [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,1000000,0] |49.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/statistics/flake8 |49.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |49.8%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 >> KqpOlapJson::EmptyVariants[1,true,0,10,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,10,0,0.5] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,100,0] >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,0,0] [GOOD] |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/import_test |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,100,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,0,0] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-COLUMNSHARD-CSV [GOOD] |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-DATASHARD-ARROW |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 |49.8%| [TA] $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,0,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,100,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,10,1000000,0.5] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,1000,0,0.5] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,0,0.5] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-DATASHARD-YDB_VALUE |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,100,0.5] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,1000000,0] |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> SystemView::QueryStatsRetries [GOOD] Test command err: 2025-10-24T16:20:47.174114Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824029121364715:2089];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:47.174142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00167e/r3tmp/tmpngQ0IA/pdisk_1.dat 2025-10-24T16:20:47.219878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:47.239458Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11842, node 1 2025-10-24T16:20:47.251708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:47.251719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:47.251720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:47.251761Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63473 2025-10-24T16:20:47.276462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:47.276487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:47.277732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63473 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:47.303833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:47.492741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:47.587018Z node 1 :KQP_COMPILE_SERVICE INFO: kqp_compile_service.cpp:284: Subscribed for config changes 2025-10-24T16:20:47.587068Z node 1 :KQP_COMPILE_SERVICE INFO: kqp_compile_service.cpp:364: Updated config 2025-10-24T16:20:47.589437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824029121365762:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.589456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824029121365751:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.589512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:47.590230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:47.595789Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824029121365765:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:20:47.656218Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824029121365849:2739] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:20:47.656494Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-10-24T16:20:47.656524Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x000071A17BDEBCE8 2025-10-24T16:20:47.656539Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7564824029121365731:2325], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01k8bgbkb49tcejht62qhxacy4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTVmZjU3NDEtZjY1YTcwNDAtYWI3MTliOGQtZmE3ZTlhYmU=, PoolId: default} 2025-10-24T16:20:47.656553Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 
2025-10-24T16:20:47.656573Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7564824029121365731:2325], queueSize: 1 2025-10-24T16:20:47.656729Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7564824029121365731:2325], compileActor: [1:7564824029121365866:2338] 2025-10-24T16:20:47.729543Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01k8bgbkb49tcejht62qhxacy4, SessionId: CompileActor 2025-10-24 16:20:47.729 INFO ydb-core-sys_view-ut(pid=33150, tid=0x00007F65D95A3640) [core dq] kqp_host.cpp:1342: Good place to weld in 2025-10-24T16:20:47.729816Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01k8bgbkb49tcejht62qhxacy4, SessionId: CompileActor 2025-10-24 16:20:47.729 INFO ydb-core-sys_view-ut(pid=33150, tid=0x00007F65D95A3640) [core dq] kqp_host.cpp:1347: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '('"source_type" '"ObjectStorage"))) (return (Write! $2 (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/tier1")) '('typeId (String '"EXTERNAL_DATA_SOURCE"))) (Void) '('('mode 'createObject) '('features $3)))) ) 2025-10-24T16:20:47.729885Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01k8bgbkb49tcejht62qhxacy4, SessionId: CompileActor 2025-10-24 16:20:47.729 INFO ydb-core-sys_view-ut(pid=33150, tid=0x00007F65D95A3640) [KQP] kqp_host.cpp:1353: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsert ... imeMs: 1761322896775 UpdateTimeMs: 1761322896776 } MaxMemoryUsage: 1048576 } 2025-10-24T16:21:36.776570Z node 41 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710735. Ctx: { TraceId: 01k8bgd3b41yge2tna8pc32n5a, Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: default}. Compute actor has finished execution: [41:7564824236469019560:3030] 2025-10-24T16:21:36.776617Z node 41 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [41:7564824236469019546:3008] TxId: 281474976710735. Ctx: { TraceId: 01k8bgd3b41yge2tna8pc32n5a, Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: default}. terminate execution. 2025-10-24T16:21:36.776627Z node 41 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [41:7564824236469019546:3008] TxId: 281474976710735. Ctx: { TraceId: 01k8bgd3b41yge2tna8pc32n5a, Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000450s ReadRows: 7 ReadBytes: 151 ru: 7 rate limiter was not found force flag: 1 2025-10-24T16:21:36.776952Z node 41 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710736. Resolved key sets: 0 2025-10-24T16:21:36.776970Z node 41 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710736. 
Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: , DatabaseId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:21:36.776976Z node 41 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [41:7564824236469019579:3008] TxId: 281474976710736. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: , DatabaseId: }. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:21:36.776990Z node 41 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [41:7564824236469019579:3008] TxId: 281474976710736. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: , DatabaseId: }. terminate execution. 2025-10-24T16:21:36.776999Z node 41 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [41:7564824236469019579:3008] TxId: 281474976710736. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=41&id=MjY3OTIwOWItMzIwNmYzNmEtNjVhZjY3NmMtNjk4YTYxYWQ=, PoolId: , DatabaseId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:21:36.780252Z node 41 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 41, TabletId: 72075186224037899 not found 2025-10-24T16:21:37.816582Z node 46 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[46:7564824242085997015:2089];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:37.816938Z node 46 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00167e/r3tmp/tmpHCZYbq/pdisk_1.dat 2025-10-24T16:21:37.819518Z node 46 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:37.838849Z node 46 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62996, node 46 2025-10-24T16:21:37.853818Z node 46 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:37.853828Z node 46 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:37.853829Z node 46 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:37.853875Z node 46 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:37.918517Z node 46 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:37.918553Z node 46 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:37.919935Z node 46 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:37.920193Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:37.924416Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:38.046702Z node 46 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:38.150778Z node 46 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7564824246380965233:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:38.150779Z node 46 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7564824246380965222:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:38.150792Z node 46 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:38.150846Z node 46 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7564824246380965237:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:38.150859Z node 46 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:38.151537Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:38.155910Z node 46 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7564824246380965236:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:21:38.245604Z node 46 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [46:7564824246380965325:2686] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:38.376094Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgd4q6e6h2wytphngdzfx7, Database: , SessionId: ydb://session/3?node_id=46&id=ZWE2ZTVmZTAtOTRlMDI0ZGEtN2IyNmI4YTUtNzFhNmFhOTQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:38.393219Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgd4ya55zrr01gzx332kze, Database: , SessionId: ydb://session/3?node_id=46&id=MjQ2NGZjMjYtYThjODEyOWItOWZlYjNlOTQtOGQ1NTRiYTU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:38.393957Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824246380965404:2348], owner: [46:7564824246380965401:2346], scan id: 0, sys view info: Type: ETopQueriesByReadBytesOneMinute SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:38.394232Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824246380965404:2348], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:38.394355Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824246380965404:2348], row count: 1, finished: 1 2025-10-24T16:21:38.394377Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824246380965404:2348], owner: [46:7564824246380965401:2346], scan id: 0, sys view info: Type: ETopQueriesByReadBytesOneMinute SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:38.394888Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322898392, txId: 281474976715662] shutting down |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 >> KqpOlapAggregations::Json_GetValue >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,0,0] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |49.8%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test |49.8%| [TS] {RESULT} ydb/tests/datashard/s3/flake8 >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,1000000,0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> ViewQuerySplit::Joins [GOOD] Test command err: 2025-10-24T16:20:45.218483Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824017706217229:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:45.218502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001648/r3tmp/tmpDJ8usC/pdisk_1.dat 2025-10-24T16:20:45.252129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:45.266500Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3025, node 1 2025-10-24T16:20:45.277028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:45.277036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:45.277038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:45.277067Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:45.320443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:45.320472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:45.321685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:45.329037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:20:45.474324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:45.530676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824017706217827:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:45.530696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824017706217853:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:45.530704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:45.531415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:45.532785Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824017706217856:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:20:45.611299Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824017706217918:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:20:45.672654Z node 1 :KQP_EXECUTER ERROR: kqp_scan_executer.cpp:149: ActorId: [1:7564824017706217951:2313] TxId: 281474976715661. Ctx: { TraceId: 01k8bgbh5d8k3g7e4k9dx55wz8, Database: , SessionId: ydb://session/3?node_id=1&id=ODhkNjlmYmMtZDNiYzBhZDktN2U4MjZiMjYtZjY2ODE5NmE=, PoolId: default, DatabaseId: /Root}. Can not find default state storage group for database 2025-10-24T16:20:45.672730Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgbh5d8k3g7e4k9dx55wz8, Database: , SessionId: ydb://session/3?node_id=1&id=ODhkNjlmYmMtZDNiYzBhZDktN2U4MjZiMjYtZjY2ODE5NmE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:20:45.677426Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7564824017706217958:2328], owner: [1:7564824017706217954:2326], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:45.677718Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7564824017706217958:2328], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:20:45.679327Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7564824017706217958:2328], row count: 1, finished: 1 2025-10-24T16:20:45.679365Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7564824017706217958:2328], owner: [1:7564824017706217954:2326], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:45.681111Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322845672, txId: 281474976715660] shutting down 2025-10-24T16:20:46.219897Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:46.698502Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgbjetetcefhbxja7by0n9, Database: , SessionId: ydb://session/3?node_id=1&id=ZTllZDA5MWUtN2EyMzU0ZjctNGUzNTgxNjgtYTNkMjJlYzY=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:20:46.698975Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7564824022001185306:2342], owner: [1:7564824022001185302:2340], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:46.699150Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7564824022001185306:2342], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:20:46.699231Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7564824022001185306:2342], row count: 1, finished: 1 2025-10-24T16:20:46.699250Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7564824022001185306:2342], owner: [1:7564824022001185302:2340], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:46.699703Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322846698, txId: 281474976715662] shutting down 2025-10-24T16:20:47.725076Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgbkem6haka2047snrdq0t, Database: , SessionId: ydb://session/3?node_id=1&id=MmJkYzg3MGUtNWJiYjIwMGItOGI2YjRhMDAtNzFhNmIyZTY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:20:47.725752Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7564824026296152638:2353], owner: [1:7564824026296152635:2351], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:47.726157Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7564824026296152638:2353], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:20:47.726312Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7564824026296152638:2353], row count: 1, finished: 1 2025-10-24T16:20:47.726330Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7564824026296152638:2353], owner: [1:7564824026296152635:2351], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:47.726840Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322847724, txId: 281474976715664] shutting down 2025-10-24T16:20:48.750223Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k8bgbmeq72pgn2jcmgx7ksjh, Database: , SessionId: ydb://session/3?node_id=1&id=ZmVhZDQ4OTMtZWMxMDMzZTgtN2Y1MjU1Zi01NzcyM2Y3Zg==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:20:48.750811Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7564824030591119971:2364], owner: [1:7564824030591119968:2362], scan id: 0, sys view info: Type: EVSlots SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:20:48.751240Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7564824030591119971:2364], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1 ... collect.cpp:29: [72075186224037893] TTxCollect::Complete 2025-10-24T16:21:40.388535Z node 21 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [21:7564824208188365582:2065] 2025-10-24T16:21:40.419753Z node 20 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [20:7564824207023813455:2065] 2025-10-24T16:21:40.432954Z node 17 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715681. Ctx: { TraceId: 01k8bgd6xx2emjc8va4nf7rnan, Database: , SessionId: ydb://session/3?node_id=17&id=MzMzNTQ2ZDUtZjk4OTY4ZTAtYjMzZDg5OGMtNmNiMTgwM2M=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:40.433741Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [17:7564824256050202795:2438], owner: [17:7564824256050202791:2436], scan id: 0, sys view info: Type: ETopPartitionsByTliOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:40.433992Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [17:7564824256050202795:2438], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-10-24T16:21:40.434213Z node 20 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:641: [72075186224037893] Reply batch: range# From { IntervalEndUs: 0 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 18446744073709551615 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 1, bytes# 63, next# 2025-10-24T16:21:40.434291Z node 17 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [17:7564824256050202795:2438], row count: 1, finished: 1 2025-10-24T16:21:40.434305Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [17:7564824256050202795:2438], owner: [17:7564824256050202791:2436], scan id: 0, sys view info: Type: ETopPartitionsByTliOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:40.435082Z node 17 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322900432, txId: 281474976715680] shutting down 2025-10-24T16:21:40.453554Z node 19 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [19:7564824209400306838:2065] 2025-10-24T16:21:40.464334Z node 20 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [20:7564824207023813541:2080] 2025-10-24T16:21:40.464926Z node 20 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:523: Send counters: service id# [20:7564824207023813541:2080], processor id# 72075186224037893, database# /Root/Tenant1, generation# 15304355476647204032, node id# 20, is retrying# 0, is labeled# 0 2025-10-24T16:21:40.465062Z node 20 
:SYSTEM_VIEWS DEBUG: db_counters.cpp:449: [72075186224037893] TEvSendDbCountersRequest: node id# 20, generation# 15304355476647204032, services count# 1, request size# 468 2025-10-24T16:21:40.465097Z node 20 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:735: Handle TEvSysView::TEvSendDbCountersResponse: service id# [20:7564824207023813541:2080], database# /Root/Tenant1, generation# 15304355476647204032 2025-10-24T16:21:40.468597Z node 17 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715683. Ctx: { TraceId: 01k8bgd6yk0ebe4253w6mcnkk3, Database: , SessionId: ydb://session/3?node_id=17&id=ZjUwMDJhMTQtMWEyOTFmMjgtMzIxNTc3YjQtYTBkY2E2Mw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:40.488884Z node 19 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [19:7564824209400306927:2081] 2025-10-24T16:21:40.503341Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [17:7564824256050202829:2448], owner: [17:7564824256050202823:2445], scan id: 0, sys view info: Type: ETopPartitionsByTliOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:40.510516Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [17:7564824256050202829:2448], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-10-24T16:21:40.510799Z node 20 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:641: [72075186224037893] Reply batch: range# From { IntervalEndUs: 0 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 18446744073709551615 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 1, bytes# 63, next# 2025-10-24T16:21:40.512252Z node 17 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [17:7564824256050202829:2448], row count: 1, finished: 1 2025-10-24T16:21:40.512273Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [17:7564824256050202829:2448], owner: [17:7564824256050202823:2445], scan id: 0, sys view info: Type: ETopPartitionsByTliOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:40.557885Z node 17 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322900468, txId: 281474976715682] shutting down 2025-10-24T16:21:40.563651Z node 21 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [21:7564824208188365667:2081] 2025-10-24T16:21:40.563982Z node 21 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:523: Send counters: service id# [21:7564824208188365667:2081], processor id# 72075186224037893, database# /Root/Tenant1, generation# 14582773934488729606, node id# 21, is retrying# 0, is labeled# 0 2025-10-24T16:21:40.564301Z node 20 :SYSTEM_VIEWS DEBUG: db_counters.cpp:449: [72075186224037893] TEvSendDbCountersRequest: node id# 21, generation# 14582773934488729606, services count# 1, request size# 293 2025-10-24T16:21:40.564404Z node 21 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:735: Handle TEvSysView::TEvSendDbCountersResponse: service id# [21:7564824208188365667:2081], database# /Root/Tenant1, generation# 14582773934488729606 2025-10-24T16:21:40.590216Z node 17 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715685. 
Ctx: { TraceId: 01k8bgd72e6w8phq9321qbvq3g, Database: , SessionId: ydb://session/3?node_id=17&id=YzcyYjk5OTMtOTk0MTk2NjMtODJlNTZjYjMtNzUyNWQ3NzE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:40.591200Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [17:7564824256050202860:2457], owner: [17:7564824256050202857:2455], scan id: 0, sys view info: Type: ETopPartitionsByTliOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:40.591412Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [17:7564824256050202860:2457], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-10-24T16:21:40.591627Z node 20 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:641: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1761322900000000 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 1761322900000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_BY_TLI_ONE_MINUTE , rows# 1, bytes# 63, next# 2025-10-24T16:21:40.591705Z node 17 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [17:7564824256050202860:2457], row count: 1, finished: 1 2025-10-24T16:21:40.591731Z node 17 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [17:7564824256050202860:2457], owner: [17:7564824256050202857:2455], scan id: 0, sys view info: Type: ETopPartitionsByTliOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:21:40.592384Z node 17 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322900589, txId: 281474976715684] shutting down 2025-10-24T16:21:40.594347Z node 17 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 21 2025-10-24T16:21:40.594392Z node 21 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [21:7564824208188365667:2081], processor id# 72075186224037893, database# /Root/Tenant1 2025-10-24T16:21:40.594399Z node 19 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [19:7564824209400306927:2081], processor id# 72075186224037899, database# /Root/Tenant2 2025-10-24T16:21:40.594430Z node 19 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [19:7564824209400306927:2081], database# /Root/Tenant2, processor id# 72075186224037899 2025-10-24T16:21:40.594542Z node 21 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [21:7564824208188365667:2081], database# /Root/Tenant1, processor id# 72075186224037893 2025-10-24T16:21:40.594502Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:40.594545Z node 17 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 18 2025-10-24T16:21:40.594600Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:40.594616Z node 17 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 20 2025-10-24T16:21:40.594664Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: 
Connected -> Disconnected 2025-10-24T16:21:40.594788Z node 17 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 19 2025-10-24T16:21:40.594846Z node 17 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:21:40.596128Z node 17 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[19:7564824209400307030:2107], Type=268959746 2025-10-24T16:21:40.596140Z node 17 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[19:7564824209400307030:2107], Type=268959746 2025-10-24T16:21:40.596144Z node 17 :HIVE WARN: hive_impl.cpp:974: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[19:7564824209400307030:2107], Type=268959746 2025-10-24T16:21:40.596255Z node 18 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [18:7564824209309147974:2080], processor id# 72075186224037899, database# /Root/Tenant2 2025-10-24T16:21:40.597255Z node 18 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [18:7564824209309147974:2080], database# /Root/Tenant2, processor id# 72075186224037899 2025-10-24T16:21:40.597518Z node 20 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [20:7564824207023813541:2080], processor id# 72075186224037893, database# /Root/Tenant1 2025-10-24T16:21:40.597581Z node 20 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [20:7564824207023813541:2080], database# /Root/Tenant1, processor id# 72075186224037893 2025-10-24T16:21:40.694715Z node 21 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [21:7564824208188365667:2081] 2025-10-24T16:21:40.779183Z node 18 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [18:7564824209309147890:2065] |49.9%| [TS] {RESULT} ydb/tests/datashard/ttl/import_test >> KqpOlapJson::EmptyVariants[1,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,10,100,0] >> KqpOlapJson::CompactionVariants[2,false,1,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,0,0] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-DATASHARD-YDB_VALUE [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,100,0] [GOOD] |49.9%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/backup_collection/flake8 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,1000000,0.5] >> KqpOlapJson::CompactionVariants[2,false,1,0,100,0] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,100,0] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-DATASHARD-CSV |49.9%| [TS] {RESULT} ydb/tests/functional/serializable/import_test |49.9%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CreateStreamingQueryErrors [GOOD] Test command err: Trying to start YDB, gRPC: 
15352, MsgBus: 22267 2025-10-24T16:21:32.514126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824219675121510:2157];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:32.514221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0008ba/r3tmp/tmpBEitDy/pdisk_1.dat 2025-10-24T16:21:32.556394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:32.567310Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15352, node 1 2025-10-24T16:21:32.594033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:32.594047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:32.594048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:32.594084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:32.616600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:32.616627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:32.617773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22267 TClient is connected to server localhost:22267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:32.649639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:21:32.660345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.676474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.697413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.709177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.779193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:32.843909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824219675123025:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.843948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.843994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824219675123035:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.844003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.907091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.914679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.925246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.939595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.953287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.960357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.974384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.988318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:33.004676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824223970091192:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.004705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.004711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824223970091197:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.004745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824223970091199:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.004754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.005290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... 564824261346392859:2725], ActorState: ExecuteState, TraceId: 01k8bgd8238c3xggv4nvns0a7k, Create QueryResponse for error on request, msg: 2025-10-24T16:21:41.586221Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392886:2733], status: GENERIC_ERROR, issues:
:3:21: Warning: Symbol $x is not used, code: 4527
: Error: Type annotation, code: 1030
:3:26: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:2:17: Error: Streaming query must have at least one streaming read from topic 2025-10-24T16:21:41.586297Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NWY3OWRiYWYtNjJjMTY3YTctOTE1NjlkMGUtZmE1NDM2Njc=, ActorId: [8:7564824261346392884:2732], ActorState: ExecuteState, TraceId: 01k8bgd82eewctkwp51jsr56ne, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.595716Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392893:2736], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:57: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:4:42: Error: Streaming query with more than one streaming write to topic is not supported now 2025-10-24T16:21:41.595823Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=ZDk4ODJmOGUtOGUwZTBkNGMtNzZmNGU2YjYtNDQ1NGZhZWU=, ActorId: [8:7564824261346392891:2735], ActorState: ExecuteState, TraceId: 01k8bgd82j57wfxs0e5sw1qn77, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.604945Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392910:2743], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:57: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:3:50: Error: Reading from YDB tables is not supported now for streaming queries 2025-10-24T16:21:41.605030Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=YzUzMDg4MGEtNWVmNzY0YWYtYjkzZWEwZGMtNmI2YTI1Yzg=, ActorId: [8:7564824261346392908:2742], ActorState: ExecuteState, TraceId: 01k8bgd82w6p5evv38ty9t69r5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.610292Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392930:2751], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:28: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:3:21: Error: Results is not allowed for streaming queries, please use INSERT to record the query result 2025-10-24T16:21:41.610389Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=ZTM1MTVlNTctMjNkMjg1YTUtZjhlZjRhZWItMjJiZmExNzk=, ActorId: [8:7564824261346392928:2750], ActorState: ExecuteState, TraceId: 01k8bgd8351nqe4rz0nm1fgzts, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.615161Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392937:2754], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:66: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:3:33: Error: Writing into YDB tables is not supported now for streaming queries 2025-10-24T16:21:41.615225Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=MTc3NGJiMDEtMWQ4N2QyMGMtNGIwYzZjMGItMWNkNDEyMTM=, ActorId: [8:7564824261346392935:2753], ActorState: ExecuteState, TraceId: 01k8bgd83ba9hg8bn58prarh49, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.620325Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392949:2759], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:38: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:5:38: Error: Operations with YDB objects is not allowed inside streaming queries 2025-10-24T16:21:41.620402Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NmMzZjgzMWYtMjgyYzg5MTMtNGQ3NGU5MDMtNGQzYmJiOA==, ActorId: [8:7564824261346392947:2758], ActorState: ExecuteState, TraceId: 01k8bgd83f5a8npvzt367grp5z, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.625157Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392965:2763], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:33: Error: At function: KiWriteTable!
:3:33: Error: Cannot find table 'db.[/Root/MyTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:21:41.625236Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=YzQwMGIzN2MtYTYxMzY4ZDQtZTU5ZDAyYjItYmY2YjA0MTg=, ActorId: [8:7564824261346392963:2762], ActorState: ExecuteState, TraceId: 01k8bgd83nb0es9sbpkcqvw55k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:21:41.629798Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392981:2767], status: GENERIC_ERROR, issues:
:3:65: Error: Unknown name: $x 2025-10-24T16:21:41.629887Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=ZmRiODBmNWItZGY5NzU0OGEtMjQ4MWUyNjUtODEyYTBiMjc=, ActorId: [8:7564824261346392979:2766], ActorState: ExecuteState, TraceId: 01k8bgd83s6gy06v4m9zwzn2gy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.633537Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392988:2770], status: GENERIC_ERROR, issues:
:3:21: Warning: Symbol $x is not used, code: 4527
: Error: Type annotation, code: 1030
:3:26: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:2:17: Error: Streaming query must have at least one streaming read from topic 2025-10-24T16:21:41.633627Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NGQ5OTNjZTAtNmQ3N2YyOWQtMjgxMjYzMmEtODM4ZmVlODY=, ActorId: [8:7564824261346392986:2769], ActorState: ExecuteState, TraceId: 01k8bgd83y9hb0hb104r68z3z0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.644740Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346392995:2773], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:57: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:4:42: Error: Streaming query with more than one streaming write to topic is not supported now 2025-10-24T16:21:41.644847Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=ZGM0NmE2YjUtYzllZWJjYzMtOGVhMjYzN2EtOWVlMmYyNDc=, ActorId: [8:7564824261346392993:2772], ActorState: ExecuteState, TraceId: 01k8bgd8420a0gx1yr3j0jzpvc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.653551Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346393012:2780], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:57: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:3:50: Error: Reading from YDB tables is not supported now for streaming queries 2025-10-24T16:21:41.653659Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NTg3YTAxNDktY2VkNWU3NDgtNjBkODMyYzUtZjVlYjkyMmI=, ActorId: [8:7564824261346393010:2779], ActorState: ExecuteState, TraceId: 01k8bgd84d8gxnc6rh9wfzktwg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.657910Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346393032:2788], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:28: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:3:21: Error: Results is not allowed for streaming queries, please use INSERT to record the query result 2025-10-24T16:21:41.657997Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=OTI4NzE3YmYtOTlkMTFhMzQtNWVlNTRiNmItMjc2OTA5YTY=, ActorId: [8:7564824261346393030:2787], ActorState: ExecuteState, TraceId: 01k8bgd84p0xyzj01azkpjh7ec, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.662165Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346393039:2791], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:66: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:3:33: Error: Writing into YDB tables is not supported now for streaming queries 2025-10-24T16:21:41.662213Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=MWU5M2NiOS00YTUzOGE1ZC0yOTkyOTc0My1kNjM0ZjA5NA==, ActorId: [8:7564824261346393037:2790], ActorState: ExecuteState, TraceId: 01k8bgd84tcbga17935r3wwz11, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.666259Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346393051:2796], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:38: Error: At function: KiCreateObject!, At STREAMING_QUERY object operation
:5:38: Error: Operations with YDB objects is not allowed inside streaming queries 2025-10-24T16:21:41.666357Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NjU3OGQ1NjgtODA0MmIxZjItODFhZGMwNjgtN2M1NGVmZTc=, ActorId: [8:7564824261346393049:2795], ActorState: ExecuteState, TraceId: 01k8bgd84y8kmb9qywmxbhpvab, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:21:41.670794Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346393060:2800], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:33: Error: At function: KiWriteTable!
:3:33: Error: Cannot find table 'db.[/Root/MyTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:21:41.670851Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NGEwMDc4YzMtMWJjMDc3ZTYtYWY5YjJhMy1jZGMwN2ZiZQ==, ActorId: [8:7564824261346393058:2799], ActorState: ExecuteState, TraceId: 01k8bgd8522wzsxqwjz5pwnh9j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:21:41.674726Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824261346393069:2804], status: GENERIC_ERROR, issues:
:3:65: Error: Unknown name: $x 2025-10-24T16:21:41.674814Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=OGQxN2EzMmMtNmVmNTczZTgtODc2ZTQxM2ItNWE0NWRmNjY=, ActorId: [8:7564824261346393067:2803], ActorState: ExecuteState, TraceId: 01k8bgd85748tad08a76ghy3sf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,100,0.5] |49.9%| [TS] {RESULT} ydb/tests/functional/backup_collection/import_test |49.9%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,0,0] [GOOD] |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:41.199377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:41.199403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:41.199410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:41.199424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:41.199430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:41.199435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:41.199455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:41.199470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:41.199616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:41.199714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:41.220515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:41.220541Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:41.224376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:41.224412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:41.224508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:41.226282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:41.226494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:41.226623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.226718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:41.228185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:41.228232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:41.228537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:41.228552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:41.228588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:41.228598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:41.228605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:41.228632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.230203Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:41.257371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-10-24T16:21:41.257464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.257526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:41.257536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:41.257617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:41.257635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:41.258525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.258577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:41.258649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.258661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:41.258668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:41.258674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:41.259183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.259200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:41.259210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:41.259613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.259627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.259634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:41.259642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-10-24T16:21:41.260423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:41.260924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:41.260972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:41.261202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.261230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:41.261238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:41.261316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:41.261326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:41.261357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:41.261372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:41.261878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:41.261889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ersist 2025-10-24T16:21:42.944883Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:42.944911Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:42.944916Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][1][StateIdle] Try persist 2025-10-24T16:21:42.944939Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:42.944943Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:42.944947Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][2][StateIdle] Try persist 2025-10-24T16:21:42.944957Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:42.944961Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:42.944968Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][0][StateIdle] Try persist 2025-10-24T16:21:42.985587Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:42.985616Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:42.985621Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][1][StateIdle] Try persist 2025-10-24T16:21:42.985644Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:42.985648Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:42.985652Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][2][StateIdle] Try persist 2025-10-24T16:21:42.985663Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:42.985667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:42.985684Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][0][StateIdle] Try persist 2025-10-24T16:21:43.057308Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.057340Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.057345Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][1][StateIdle] Try persist 2025-10-24T16:21:43.057369Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.057373Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2350: [72075186233409546][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.057385Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][2][StateIdle] Try persist 2025-10-24T16:21:43.057395Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.057400Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.057404Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][0][StateIdle] Try persist 2025-10-24T16:21:43.097968Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.097996Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.098001Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][1][StateIdle] Try persist 2025-10-24T16:21:43.098022Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.098026Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.098029Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][2][StateIdle] Try persist 2025-10-24T16:21:43.098037Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.098041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.098044Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][0][StateIdle] Try persist 2025-10-24T16:21:43.128494Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:157: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-10-24T16:21:43.128530Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-10-24T16:21:43.128661Z node 1 :PERSQUEUE DEBUG: partition.cpp:1016: [72075186233409546][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-10-24T16:21:43.128690Z node 1 :PERSQUEUE DEBUG: partition.cpp:1016: [72075186233409546][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-10-24T16:21:43.128719Z node 1 :PERSQUEUE DEBUG: partition.cpp:1016: [72075186233409546][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-10-24T16:21:43.128834Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:563: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 
StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-10-24T16:21:43.128866Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1849: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-10-24T16:21:43.128889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-10-24T16:21:43.140503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:21:43.181269Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.181305Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.181309Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][1][StateIdle] Try persist 2025-10-24T16:21:43.181337Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.181341Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.181345Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][2][StateIdle] Try persist 2025-10-24T16:21:43.181356Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186233409546][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:21:43.181360Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186233409546][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:21:43.181364Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186233409546][Partition][0][StateIdle] Try persist 2025-10-24T16:21:43.201859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:43.201945Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 110us result status StatusSuccess 2025-10-24T16:21:43.202068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { 
PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpOlapAggregations::Json_GetValue [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,100,0] >> KqpOlapJson::EmptyVariants[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,0,1000000,0] >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,0,1000000,0] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-DATASHARD-CSV [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,10,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,0,0] [GOOD] >> KqpOlapAggregations::Aggregation_Avg_NullMixGroupBy [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1,0,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,0,0] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,1000,0,0.5] >> KqpOlapAggregations::Json_GetValue_Minus >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,100,0] >> KqpBoolColumnShard::TestBoolAsPrimaryKey-EXECUTE_QUERY-ARROW >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1,0,0,0.5] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,100,0.5] >> KqpOlapJson::EmptyVariants[1,true,0,10,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,100,0] >> KqpOlapJson::CompactionVariants[2,false,1,0,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,1000000,0] |49.9%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,1000000,0.5] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,1000000,0.5] >> KqpOlapAggregations::Json_GetValue_Minus [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,100,0.5] >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,1000000,0.5] >> KqpOlapAggregations::Aggregation_Avg_GroupByNull |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,100,0] |49.9%| [TS] {RESULT} ydb/tests/stress/topic/tests/import_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> SystemView::StoragePoolsFields [GOOD] Test command err: 2025-10-24T16:20:44.616807Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824016357875084:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:44.616824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00165e/r3tmp/tmpX75krf/pdisk_1.dat 2025-10-24T16:20:44.648006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:44.660790Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25584, node 1 2025-10-24T16:20:44.674576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:44.674590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:44.674592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:44.674638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1096 TClient is connected to server localhost:1096 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:44.718668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:44.718701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:44.723653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:44.768082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:44.900032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:44.959455Z node 1 :KQP_COMPILE_SERVICE INFO: kqp_compile_service.cpp:284: Subscribed for config changes 2025-10-24T16:20:44.959533Z node 1 :KQP_COMPILE_SERVICE INFO: kqp_compile_service.cpp:364: Updated config 2025-10-24T16:20:44.962751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824016357876123:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:44.962753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824016357876131:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:44.962771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:44.962841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824016357876138:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:44.962852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:44.963558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:44.967468Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824016357876137:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:20:45.041264Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824020652843506:2733] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:20:45.041614Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-10-24T16:20:45.041645Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00001285D4FB03B8 2025-10-24T16:20:45.041656Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7564824016357876119:2326], queryUid: , queryText: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n ", keepInCache: 1, split: 0{ TraceId: 01k8bgbgs2d9fbndvv1j75fwr7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTk0ZTYzYjAtZGFmMzU1ZmMtMTY0NWZjMzgtMjYyNzhkOTc=, PoolId: default} 2025-10-24T16:20:45.041685Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n CREATE EXTERNAL DATA SOURCE `tier1` WITH (\n SOURCE_TYPE = \"ObjectStorage\",\n LOCATION = \"http://fake.fake/olap-tier1\",\n AUTH_METHOD = \"AWS\",\n AWS_ACCESS_KEY_ID_SECRET_NAME = \"accessKey\",\n AWS_SECRET_ACCESS_KEY_SECRET_NAME = \"secretKey\",\n AWS_REGION = \"ru-central1\"\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 
2025-10-24T16:20:45.041709Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7564824016357876119:2326], queueSize: 1 2025-10-24T16:20:45.041902Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7564824016357876119:2326], compileActor: [1:7564824020652843525:2338] 2025-10-24T16:20:45.079210Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01k8bgbgs2d9fbndvv1j75fwr7, SessionId: CompileActor 2025-10-24 16:20:45.079 INFO ydb-core-sys_view-ut(pid=29584, tid=0x00007FEE503A6640) [core dq] kqp_host.cpp:1342: Good place to weld in 2025-10-24T16:20:45.079419Z node 1 :KQP_YQL INFO: log.cpp:67: TraceId: 01k8bgbgs2d9fbndvv1j75fwr7, SessionId: CompileActor 2025-10-24 16:20:45.079 INFO ydb-core-sys_view-ut(pid=29584, tid=0x00007FEE503A6640) [core dq] kqp_host.cpp:1347: Compiled query: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"accessKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"secretAccessKey")))))) (let $2 (Write! $1 (DataSink '"kikimr" '"db") (Key '('objectId (String '"secretKey")) '('typeId (String '"SECRET"))) (Void) '('('mode 'upsertObject) '('features '('('"value" '"fakeSecret")))))) (let $3 '('('"auth_method" '"AWS") '('"aws_access_key_id_secret_name" '"accessKey") '('"aws_region" '"ru-central1") '('"aws_secret_access_key_secret_name" '"secretKey") '('"location" '"http://fake.fake/olap-tier1") '(' ... ] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.918401Z node 46 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7564824234231601914:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.918414Z node 46 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.918814Z node 46 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:35.920175Z node 46 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7564824234231601913:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:21:36.017820Z node 46 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [46:7564824238526569262:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:36.033133Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710661. Ctx: { TraceId: 01k8bgd290424r7eazc2s25yz6, Database: , SessionId: ydb://session/3?node_id=46&id=MjVmMjg2My01ZTk0NDU1MS1jZTA5YjY0OC1jMWM3YmJmNQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:36.033772Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824238526569304:2328], owner: [46:7564824238526569300:2326], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:36.033974Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824238526569304:2328], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:36.034090Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824238526569304:2328], row count: 0, finished: 1 2025-10-24T16:21:36.034124Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824238526569304:2328], owner: [46:7564824238526569300:2326], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:36.034539Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322896032, txId: 281474976710660] shutting down 2025-10-24T16:21:36.593830Z node 46 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:37.052069Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710663. Ctx: { TraceId: 01k8bgd3mb7qx12yxxn483xqyz, Database: , SessionId: ydb://session/3?node_id=46&id=OGUxN2QyZjAtOWEzOTZiZjctNDExZTNmOTgtYTdiNDE3MTI=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:21:37.052573Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824242821536651:2342], owner: [46:7564824242821536647:2340], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:37.052751Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824242821536651:2342], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:37.052819Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824242821536651:2342], row count: 0, finished: 1 2025-10-24T16:21:37.052832Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824242821536651:2342], owner: [46:7564824242821536647:2340], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:37.053313Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322897051, txId: 281474976710662] shutting down 2025-10-24T16:21:38.072126Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710665. Ctx: { TraceId: 01k8bgd4m6ehvcanqfxgjqfsdb, Database: , SessionId: ydb://session/3?node_id=46&id=YTIxOWM1YjYtNTE1ZGE3MzYtMzFiNGYzZi0xYjI2NjVlMA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:38.072840Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824247116503983:2353], owner: [46:7564824247116503980:2351], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:38.073045Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824247116503983:2353], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:38.073102Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824247116503983:2353], row count: 0, finished: 1 2025-10-24T16:21:38.073115Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824247116503983:2353], owner: [46:7564824247116503980:2351], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:38.073563Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322898071, txId: 281474976710664] shutting down 2025-10-24T16:21:39.095805Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710667. Ctx: { TraceId: 01k8bgd5m240ge2axpyxkj9atj, Database: , SessionId: ydb://session/3?node_id=46&id=ZmQ5NmEwYmMtYmNkODQ4NGEtNGQ0NThmZDQtMmUxMWFkODY=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:21:39.097149Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824251411471317:2364], owner: [46:7564824251411471313:2362], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:39.097710Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824251411471317:2364], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:39.097821Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824251411471317:2364], row count: 0, finished: 1 2025-10-24T16:21:39.097836Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824251411471317:2364], owner: [46:7564824251411471313:2362], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:39.098719Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322899094, txId: 281474976710666] shutting down 2025-10-24T16:21:40.113103Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710669. Ctx: { TraceId: 01k8bgd6m31tt1mc7ndwzaan1w, Database: , SessionId: ydb://session/3?node_id=46&id=N2U5N2Y4M2MtNjFiNjM1MTktZjBhMGMzMzAtZTg3YjE0MTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:40.113604Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824255706438650:2375], owner: [46:7564824255706438646:2373], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:40.113762Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824255706438650:2375], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:40.113810Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824255706438650:2375], row count: 0, finished: 1 2025-10-24T16:21:40.113825Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824255706438650:2375], owner: [46:7564824255706438646:2373], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:40.114284Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322900112, txId: 281474976710668] shutting down 2025-10-24T16:21:40.590983Z node 46 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[46:7564824234231601291:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:40.591007Z node 46 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:21:41.137307Z node 46 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710671. 
Ctx: { TraceId: 01k8bgd7kv5jdqdk6y150c344d, Database: , SessionId: ydb://session/3?node_id=46&id=Njk1YTRkMzAtMzFkYzg4ZTYtMTNlZjA3ZC1hNTU5ZDJlOA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:21:41.137835Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [46:7564824260001405988:2388], owner: [46:7564824260001405984:2386], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:41.137994Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [46:7564824260001405988:2388], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-10-24T16:21:41.142676Z node 46 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [46:7564824260001405988:2388], row count: 1, finished: 1 2025-10-24T16:21:41.142724Z node 46 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [46:7564824260001405988:2388], owner: [46:7564824260001405984:2386], scan id: 0, sys view info: Type: EStoragePools SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-10-24T16:21:41.143748Z node 46 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322901135, txId: 281474976710670] shutting down |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 >> KqpOlapJson::QuotedFilterVariants[2,false,1,10,1000000,0.5] [GOOD] >> KqpOlapAggregations::Json_GetValue_ToInt >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,false,1,1000,0,0] |49.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |50.0%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |50.0%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> KqpOlapAggregations::Json_GetValue_ToInt [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,1000000,0] [GOOD] |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 >> KqpOlapAggregations::JsonDoc_GetValue |50.0%| [TS] {RESULT} ydb/tests/sql/flake8 >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,1000000,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:33.208928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.208943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.208946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.208950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.208953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.208956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.208963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.208972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.209041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.209071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.218622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:33.218636Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.220853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.220874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.220907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.222184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.222389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.222476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.222537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.223501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.223536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.223733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-10-24T16:20:33.223746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.223774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.223782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.223790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.223811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.225055Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.239241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.239284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.239320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.239325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.239389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.239399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.239919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.239946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.239974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.239979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet# 72057594046678944 2025-10-24T16:20:33.239982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.239986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.240314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.240321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.240327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.240569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.240574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.240577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.240581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.240991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.241329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.241363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.241508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.241525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:33.241530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.241575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:33.241582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.241602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:33.241612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:33.241929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.241935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... shard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:17.131741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:21.173948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:21.174014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:129: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2025-10-24T16:21:21.174036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:21.258561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-10-24T16:21:21.258630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-10-24T16:21:21.258655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-10-24T16:21:21.334597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-10-24T16:21:21.334664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-10-24T16:21:21.334696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-10-24T16:21:21.380028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-10-24T16:21:21.380105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-10-24T16:21:21.380132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-10-24T16:21:21.432401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at 
tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0009 2025-10-24T16:21:21.473127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-10-24T16:21:21.473211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72075186233409549:4 data size 41 row count 1 2025-10-24T16:21:21.473237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-10-24T16:21:21.473279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:21.483652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:25.329207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0009 2025-10-24T16:21:25.371012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-10-24T16:21:25.371104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72075186233409549:4 data size 41 row count 1 2025-10-24T16:21:25.371125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-10-24T16:21:25.371172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:25.381375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:29.468811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.001 2025-10-24T16:21:29.509750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-10-24T16:21:29.509844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72075186233409549:4 data size 41 row count 1 2025-10-24T16:21:29.509871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, 
RowCount 1, DataSize 41 2025-10-24T16:21:29.509924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:29.520140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:33.513887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0009 2025-10-24T16:21:33.554918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-10-24T16:21:33.555011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72075186233409549:4 data size 41 row count 1 2025-10-24T16:21:33.555034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-10-24T16:21:33.555084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:33.565293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:37.477160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0009 2025-10-24T16:21:37.517913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-10-24T16:21:37.517989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72075186233409549:4 data size 41 row count 1 2025-10-24T16:21:37.518009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-10-24T16:21:37.518042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:37.528186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:41.533651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0008 2025-10-24T16:21:41.574468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-10-24T16:21:41.574537Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72075186233409549:4 data size 41 row count 1 2025-10-24T16:21:41.574553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-10-24T16:21:41.574585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:477: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-10-24T16:21:41.584785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-10-24T16:21:45.277104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:45.277254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":41},"id":"72057594046678944-3-120-179-41","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"Category":"Table","k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2025-10-24T16:21:45.280290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... 
waiting for metering (done) >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,100,0.5] >> KqpOlapJson::QuotedFilterVariants[10,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1,0,100,0.5] [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,true,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[1,true,0,10,100,0.5] [GOOD] |50.0%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1,1000,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[2,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 10049, MsgBus: 3536 2025-10-24T16:21:34.120902Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824231527111568:2140];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:34.120955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001389/r3tmp/tmpOCs9wv/pdisk_1.dat 2025-10-24T16:21:34.151096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:34.159540Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:34.159676Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824231527111466:2081] 1761322894120378 != 1761322894120381 TServer::EnableGrpc on GrpcPort 10049, node 1 2025-10-24T16:21:34.165229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:34.165236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:34.165237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:34.165266Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3536 TClient is connected to server localhost:3536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:34.228630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:34.228658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:34.229620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:34.230521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:34.306567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:34.447487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824231527112141:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.447508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.447561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824231527112151:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.447570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.476717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:34.492269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.492317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.492360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.492379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.492399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.492416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.492434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.492453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.492478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.492499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.492515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.492525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.492527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:34.492540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824231527112212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:34.492562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.492609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.492636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.492661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.492687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.492712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.492735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.492760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824231527112221:2324];tablet_id=72075186224037889;process=TTx ... 
failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:43.159835Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:21:43.159836Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:43.170238Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:43.172082Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710664;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:43.172096Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710664;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:43.177718Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:43.185867Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 
0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:43.185867Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:21:43.205425Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:21:43.205426Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:21:43.224928Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=26;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:43.683039Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:21:44.040448Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824268660231790:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:44.040460Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824268660231790:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:44.040538Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824268660231790:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322903095;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:44.040552Z node 6 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824268660231790:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322903109;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:44.040607Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824268660231790:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322903221;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322903221 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:44.046053Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824268660231789:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:44.046062Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824268660231789:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:44.046127Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824268660231789:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322903095;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:44.046137Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824268660231789:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322903109;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:44.046176Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824268660231789:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322903221;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322903221 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } 
NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.0%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |50.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:35.182248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:35.182269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:35.182273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:35.182280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:35.182285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:35.182288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:35.182299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:35.182310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:35.182397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:35.182443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:35.194854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:35.194873Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:35.198007Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:35.198035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:35.198093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:35.199359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:35.199540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:35.199635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:35.199695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:35.200649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:35.200690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:35.200909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:35.200916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:35.200936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:35.200941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:35.200946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:35.200965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.202117Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:35.218236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:35.218315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.218371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:35.218377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for 
txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:35.218465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:35.218476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:35.219214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:35.219249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:35.219301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.219307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:35.219312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:35.219315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:35.219617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.219625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:35.219629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:35.220041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.220053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.220060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:35.220067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:35.220816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:35.221283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:35.221324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:35.221580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:35.221605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:35.221615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:35.221675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:35.221684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:35.221714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:35.221728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:35.222148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:35.222157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-10-24T16:21:44.674386Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-10-24T16:21:44.674417Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:361:2338]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-10-24T16:21:44.674422Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5436: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-10-24T16:21:44.767353Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:765:2655]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-10-24T16:21:44.767392Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-10-24T16:21:44.767423Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-10-24T16:21:44.767442Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:21:44.767451Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2025-10-24T16:21:44.767458Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-10-24T16:21:44.767463Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2025-10-24T16:21:44.767510Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:765:2655]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:21:44.767547Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3474: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-10-24T16:21:44.767644Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553162, Sender [3:765:2655], Recipient [3:895:2757]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 9 Memory: 119288 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 117 TableOwnerId: 72075186233409549 FollowerId: 0 2025-10-24T16:21:44.767650Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:21:44.767667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2025-10-24T16:21:44.767683Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:21:44.767690Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-10-24T16:21:44.777846Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:767:2656]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-10-24T16:21:44.777872Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-10-24T16:21:44.777895Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-10-24T16:21:44.777914Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:21:44.777920Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2025-10-24T16:21:44.777925Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-10-24T16:21:44.777928Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2025-10-24T16:21:44.777957Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:767:2656]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:21:44.777991Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3474: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-10-24T16:21:44.778065Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553162, Sender [3:767:2656], Recipient [3:895:2757]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 8 Memory: 119288 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 117 TableOwnerId: 72075186233409549 FollowerId: 0 2025-10-24T16:21:44.778070Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:21:44.778084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 
followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0008 2025-10-24T16:21:44.778112Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:21:44.788586Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:895:2757]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:21:44.788611Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:21:44.788626Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:895:2757], Recipient [3:895:2757]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:44.788631Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:44.798757Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:895:2757]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-10-24T16:21:44.798788Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5290: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-10-24T16:21:44.798795Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-10-24T16:21:44.798833Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-10-24T16:21:44.798852Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-10-24T16:21:44.798879Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:895:2757]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-10-24T16:21:44.798884Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5436: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-10-24T16:21:44.798965Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269746180, Sender [3:1998:3821], Recipient [3:895:2757]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-10-24T16:21:44.798970Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5435: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-10-24T16:21:44.819459Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [3:2001:3824], Recipient [3:765:2655]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:44.819487Z node 3 :TX_DATASHARD TRACE: 
datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:44.819496Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409552, clientId# [3:2000:3823], serverId# [3:2001:3824], sessionId# [0:0:0] 2025-10-24T16:21:44.819559Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553213, Sender [3:1999:3822], Recipient [3:765:2655]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-10-24T16:21:44.819654Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [3:2004:3827], Recipient [3:767:2656]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:44.819658Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:44.819663Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409553, clientId# [3:2003:3826], serverId# [3:2004:3827], sessionId# [0:0:0] 2025-10-24T16:21:44.819677Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553213, Sender [3:2002:3825], Recipient [3:767:2656]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,0,0] >> KqpOlapAggregations::JsonDoc_GetValue_ToString >> KqpOlapJson::CompactionVariants[2,false,1,0,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[1,false,0,0,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,0,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,100,0.5] |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so >> KqpOlapJson::QuotedFilterVariants[2,false,1,1000,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:38.648786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:38.648807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:38.648811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:38.648823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:38.648829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:38.648832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:38.648839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:38.648850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:38.649048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:38.649125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:38.662762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:38.662785Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:38.665374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:38.665429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:38.665467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:38.667898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:38.667974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:38.668083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:38.668174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:38.668778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:38.668818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:38.669088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:38.669096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:38.669116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:38.669122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:38.669127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:38.669138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-10-24T16:21:38.670396Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:21:38.690027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:38.690131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.690207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:38.690215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:38.690266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:38.690280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:38.691281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:38.691327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:38.691393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.691404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:38.691410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:38.691415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:38.691816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.691828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:38.691836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:38.692227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.692237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:38.692244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:38.692251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:38.692931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:38.693305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:38.693391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:38.693620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:38.693644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:38.693652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:38.693766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:38.693774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:38.693818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:38.693830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:38.694320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:38.694328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_b ... 
t reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-10-24T16:21:41.936733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-10-24T16:21:41.936762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2092: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2152: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2210: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:21:41.936804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2296: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2362: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.936986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937150Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.937193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.940610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:41.941150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:41.941169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:41.941598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:41.941615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:41.941625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:41.941884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:760:2712] sender: [1:815:2058] recipient: [1:15:2062] 2025-10-24T16:21:41.983511Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:41.983607Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 124us result status StatusSuccess 2025-10-24T16:21:41.983720Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" 
KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82424 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:41.984058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:41.984084Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2025-10-24T16:21:41.984145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/olap_workload >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,1000000,0.5] >> KqpOlapAggregations::JsonDoc_GetValue_ToString [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,0,0] >> KqpOlapJson::FilterVariants[2,true,1024,10,100,0] >> KqpBoolColumnShard::TestBoolAsPrimaryKey-EXECUTE_QUERY-ARROW [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,0,0] [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,0,0] [GOOD] |50.0%| [TS] {RESULT} ydb/tests/fq/http_api/import_test >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,100,0] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,100,0] |50.0%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |50.0%| [TS] {RESULT} ydb/tests/solomon/reading/import_test >> KqpOlapAggregations::JsonDoc_GetValue_ToInt >> KqpBoolColumnShard::TestBoolAsPrimaryKey-EXECUTE_QUERY-CSV >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,100,0] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,0,0] >> TxUsage::WriteToTopic_Demo_44_Table [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> KqpOlapAggregations::JsonDoc_GetValue_ToInt [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,0,0] >> KqpOlapJson::CompactionVariants[2,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,0,0,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,1000000,0] >> KqpOlapJson::FilterVariants[2,true,1024,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,1000000,0.5] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> KqpOlapAggregations::Json_Exists >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,100,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,100,0.5] >> KqpOlapJson::FilterVariants[2,true,1024,10,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,1000000,0] >> KqpOlapJson::CompactionVariants[2,false,1,0,1000000,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,1000000,0] |50.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |50.1%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/flake8 >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,0,0] >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,100,0.5] >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:35.207952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:35.207969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:35.207974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:35.207979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:35.207984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:35.207987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:35.207997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:35.208007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:35.208083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:35.208142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:35.220102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:35.220119Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:35.222693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:35.222714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:35.222767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:35.224121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:35.224191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:35.224288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:35.224339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:35.225158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:35.225199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:35.225407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:35.225414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:35.225435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:35.225441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:35.225446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:35.225462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.226803Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:35.242319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:35.242383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.242426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:35.242432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:35.242466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:35.242476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:35.242949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:35.242980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-10-24T16:20:35.243024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.243030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:35.243034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:35.243038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:35.243322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.243330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:35.243335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:35.243562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.243568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:35.243572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:35.243576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:35.244042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:35.244394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:35.244474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:35.244726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:35.244753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:35.244762Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:35.244823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:35.244831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:35.244859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:35.244872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:35.245285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:35.245293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... shard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2025-10-24T16:21:45.762933Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:336:2315]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-10-24T16:21:45.762938Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-10-24T16:21:45.762947Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-10-24T16:21:45.762952Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:21:45.762956Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409547 2025-10-24T16:21:45.762959Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-10-24T16:21:45.762965Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409547 2025-10-24T16:21:45.762979Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:336:2315]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:21:45.763017Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3474: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-10-24T16:21:45.763065Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:333:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:21:45.763095Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3474: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-10-24T16:21:45.763183Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553162, Sender [3:336:2315], Recipient [3:140:2161]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4 Memory: 119288 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-10-24T16:21:45.763191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:21:45.763209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0004 2025-10-24T16:21:45.763230Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:21:45.763239Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-10-24T16:21:45.763285Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553162, Sender [3:333:2314], Recipient [3:140:2161]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 10 Memory: 124168 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-10-24T16:21:45.763293Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:21:45.763300Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.001 2025-10-24T16:21:45.763319Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 
72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:21:45.804315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:45.804346Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:45.804353Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-10-24T16:21:45.804377Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:609: Will execute TTxStoreStats, queue# 2 2025-10-24T16:21:45.804395Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-10-24T16:21:45.804429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-10-24T16:21:45.804455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2025-10-24T16:21:45.804460Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2025-10-24T16:21:45.804485Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-10-24T16:21:45.804510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:469: Do not want to split tablet 72075186233409547 by size, its table already has 2 out of 2 partitions 2025-10-24T16:21:45.804523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-10-24T16:21:45.804532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-10-24T16:21:45.804535Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-10-24T16:21:45.804546Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 
1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-10-24T16:21:45.804553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:469: Do not want to split tablet 72075186233409546 by size, its table already has 2 out of 2 partitions 2025-10-24T16:21:45.804584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:45.814814Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:45.814846Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:21:45.814867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:21:45.846064Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [3:1336:3255], Recipient [3:333:2314]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:45.846094Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:45.846105Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409546, clientId# [3:1335:3254], serverId# [3:1336:3255], sessionId# [0:0:0] 2025-10-24T16:21:45.846162Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553213, Sender [3:1334:3253], Recipient [3:333:2314]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-10-24T16:21:45.846424Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [3:1339:3258], Recipient [3:336:2315]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:45.846429Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:45.846435Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409547, clientId# [3:1338:3257], serverId# [3:1339:3258], sessionId# [0:0:0] 2025-10-24T16:21:45.846454Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553213, Sender [3:1337:3256], Recipient [3:336:2315]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:41.687130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:41.687147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:41.687151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:41.687161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:41.687166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:41.687169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:41.687179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:41.687188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:41.687279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:41.687324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:41.701806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:41.701824Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:41.704289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:41.704314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:41.704345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:41.705884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:41.706106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:41.706201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.706272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:41.707350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:41.707393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:41.707592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:41.707598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:41.707623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-10-24T16:21:41.707629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:41.707633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:41.707652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.708995Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:41.727748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:41.727828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.727898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:41.727906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:41.727979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:41.727991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:41.728766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.728808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:41.728869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.728879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:41.728884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:41.728890Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:41.729336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.729348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:41.729356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:41.729681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.729689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:41.729694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:41.729701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:41.730296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:41.730702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:41.730743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:41.730927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:41.730950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:41.730957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:41.731023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:41.731029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:41.731056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:41.731067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:41.731485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:41.731493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 4, at schemeshard: 72057594046678944 2025-10-24T16:21:41.903522Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-10-24T16:21:41.903547Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-10-24T16:21:41.903651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:41.904381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:21:41.904395Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:41.904433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:21:41.904482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:21:41.904487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:41.904865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:21:41.904889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:21:41.904922Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877763, Sender [1:1028:2870], Recipient [1:291:2277]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1028:2870] ServerId: [1:1029:2871] } 2025-10-24T16:21:41.904928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5326: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-10-24T16:21:41.904936Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6157: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-10-24T16:21:41.905037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:21:41.905045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:21:41.905127Z node 1 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:1044:2886], Recipient [1:291:2277]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:41.905133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:21:41.905137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046678944 2025-10-24T16:21:41.905158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124996, Sender [1:555:2486], Recipient [1:291:2277]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-10-24T16:21:41.905163Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5243: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-10-24T16:21:41.905174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:21:41.905193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:21:41.905198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1042:2884] 2025-10-24T16:21:41.905219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [1:1044:2886], Recipient [1:291:2277]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:21:41.905224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:21:41.905228Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-10-24T16:21:41.905315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:1045:2887], Recipient [1:291:2277]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:21:41.905321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:41.905334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:41.905382Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 46us result status StatusSuccess 2025-10-24T16:21:41.905527Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } 
ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:41.905650Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271188001, Sender [1:1046:2888], Recipient [1:291:2277]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-10-24T16:21:41.905658Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5268: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-10-24T16:21:41.905665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-10-24T16:21:41.905671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2025-10-24T16:21:41.905693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:1047:2889], Recipient [1:291:2277]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:21:41.905698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:41.905789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:41.906192Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 27us result status StatusSuccess 2025-10-24T16:21:41.906306Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { 
Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:39.518554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:39.518576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:39.518580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:39.518592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:39.518597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:39.518601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:39.518611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:39.518622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:39.518738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:39.518792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:39.529120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:39.529148Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:39.532001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:39.532030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:39.532077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:39.533499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:39.533655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:39.533728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:39.533816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:39.534809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:39.534846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:39.535068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:39.535075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:39.535106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:39.535112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:39.535116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:39.535137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:39.536562Z node 1 
:HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:39.551579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:39.551669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:39.551728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:39.551734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:39.551802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:39.551812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:39.552611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:39.552649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:39.552706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:39.552715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:39.552719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:39.552723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:39.553146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:39.553162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:39.553170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:39.553549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:39.553559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:39.553564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:39.553571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:39.554203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:39.554615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:39.554660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:39.554848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:39.554867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:39.554872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:39.554930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:39.554937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:39.554967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:39.554976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:39.555327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:39.555333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:47.934461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:21:47.935671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:47.935710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:47.935986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435083, Sender 
[1:1135:3066], Recipient [1:1135:3066]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:47.935995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5293: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:47.936341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:47.936352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:47.936542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [1:1135:3066], Recipient [1:1135:3066]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:47.936551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:47.936591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:47.936602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:47.936609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:47.936613Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:47.937119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274399233, Sender [1:1171:3066], Recipient [1:1135:3066]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:47.937134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5401: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:47.937139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1135:3066] sender: [1:1192:2058] recipient: [1:15:2062] 2025-10-24T16:21:47.968795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:1191:3111], Recipient [1:1135:3066]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-10-24T16:21:47.968817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:47.968850Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:47.968964Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 101us result status StatusSuccess 2025-10-24T16:21:47.969186Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4210 Memory: 141304 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload_mixed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:36.905127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:36.905151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:36.905157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:36.905172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:36.905178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:36.905183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:36.905198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:36.905212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:36.905340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:36.905405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:36.917715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:36.917746Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:36.920302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:36.920331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:36.920354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:36.921563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:36.921704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:36.921791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:36.921861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:36.922643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:36.922675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:36.922881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:36.922889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:36.922917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:36.922922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:36.922926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:36.922944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:36.924253Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:36.940758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:36.940831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:36.940885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:36.940892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:36.940960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:36.940970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:36.941637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:36.941672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:36.941722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:36.941730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:36.941747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:36.941751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:36.942047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:36.942055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:36.942062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:36.942300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:36.942306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:36.942310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-10-24T16:21:36.942315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:36.942753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:36.943182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:36.943211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:36.943368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:36.943384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:36.943390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:36.943463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:36.943472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:36.943498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:36.943507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:36.943834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:36.943840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
0-24T16:21:42.992510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:42.992738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:21:42.994388Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:42.994441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:42.994962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 
2146435083, Sender [1:1017:2960], Recipient [1:1017:2960]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:42.994974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5293: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:42.995035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:42.995041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:42.995084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [1:1017:2960], Recipient [1:1017:2960]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:42.995088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:42.995103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:42.995109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:42.995113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:42.995116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:42.995137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274399233, Sender [1:1053:2960], Recipient [1:1017:2960]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:42.995140Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5401: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:42.995143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1017:2960] sender: [1:1072:2058] recipient: [1:15:2062] 2025-10-24T16:21:43.027049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:1071:3003], Recipient [1:1017:2960]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-10-24T16:21:43.027068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:43.027096Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:43.027196Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 89us result status StatusSuccess 2025-10-24T16:21:43.027463Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82424 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::EmptyVariants[1,true,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 4708, MsgBus: 18820 2025-10-24T16:21:33.771708Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824225619528417:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:33.771734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b5b/r3tmp/tmpWgDN4K/pdisk_1.dat 2025-10-24T16:21:33.813586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:33.823793Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:33.823993Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824225619528300:2081] 1761322893770724 != 1761322893770727 TServer::EnableGrpc on GrpcPort 4708, node 1 2025-10-24T16:21:33.832350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:33.832360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:33.832361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:33.832390Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18820 TClient is connected to server localhost:18820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:33.879694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:33.879722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:33.880473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:33.880688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:34.097939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:34.113268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824229914496271:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.113293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.113338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824229914496281:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.113347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.163607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:34.174460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.174509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.174561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.174587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.174607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.174631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.174655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.174682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.174706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.174729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.174755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.174777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:34.174804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824229914496334:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:34.175288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:34.175300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:34.175313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:34.175317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:34.175338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:34.175347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:34.175359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:34.175367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:34.175378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:34.175386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:34.175396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=T ... 025-10-24T16:21:45.651898Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824276201432620:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.651920Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.652000Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824276201432622:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.652015Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.657603Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.660299Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (1u), (2u), (3u), (4u) 2025-10-24T16:21:45.664643Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824276201432653:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.664667Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.664710Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824276201432658:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.664716Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824276201432659:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.664722Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.665374Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:45.671910Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824276201432662:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:21:45.731242Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824276201432713:2461] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:45.753503Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (11u), (12u), (13u), (14u) 2025-10-24T16:21:45.781621Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:21:45.947535Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:46.601783Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:46.601806Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:46.601811Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:46.601981Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905650;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:46.601992Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905678;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:46.601998Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905692;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:21:46.602158Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824276201432484:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322905706;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322905706 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] OUTPUT: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[[""]];[[""]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapAggregations::Aggregation_Avg_GroupByNull [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,0,0] [GOOD] >> TxUsage::WriteToTopic_Demo_44_Query >> 
KqpOlapJson::DuplicationCompactionVariants[10,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapAggregations::Json_Exists [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,0,0] |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::EmptyVariants[1,true,0,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 22729, MsgBus: 11819 2025-10-24T16:21:35.168994Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824234386532828:2156];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:35.169037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001381/r3tmp/tmpe62wWg/pdisk_1.dat 2025-10-24T16:21:35.207640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:35.221102Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:35.221297Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824234386532699:2081] 1761322895168100 != 1761322895168103 TServer::EnableGrpc on GrpcPort 22729, node 1 2025-10-24T16:21:35.230992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:35.231007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:35.231009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:35.231055Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11819 TClient is connected to server localhost:11819 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:35.277010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:35.277040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:35.278109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:35.285330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:35.482042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:35.490732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824234386533374:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.490750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.490809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824234386533384:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.490819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.525269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:35.534601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:35.534648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:35.534686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:35.534710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:35.534730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:35.534754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:35.534771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:35.534786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:35.534802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:35.534814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:35.534834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:35.534846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:35.534860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234386533437:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:35.535249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:35.535260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:35.535269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:35.535272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:35.535288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:35.535293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:35.535300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:35.535303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:35.535308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:35.535315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:35.535319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.908366Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.916336Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:45.922392Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278448248253:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.922412Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.922442Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278448248255:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.922452Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.925179Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.930104Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (1u), (2u), (3u), (4u) 2025-10-24T16:21:45.937071Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278448248286:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.937094Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.937122Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278448248293:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.937129Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.937132Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278448248291:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.937861Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:45.944905Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824278448248295:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:21:46.042073Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824282743215642:2461] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:46.064471Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (11u), (12u), (13u), (14u) 2025-10-24T16:21:46.099193Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:46.486818Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:21:46.886071Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:46.886087Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:46.886093Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:46.886270Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905937;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:46.886287Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905951;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:46.886292Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905965;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:21:46.886385Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824278448248117:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322905979;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322905979 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] OUTPUT: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[[""]];[[""]]] INDEX:0/0/0 HEADER:0/0/0 |50.1%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,0,0.5] |50.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,0,0.5] >> KqpOlapJson::FilterVariants[2,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,0,1000000,0.5] [GOOD] |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |50.1%| [TS] {RESULT} ydb/tests/olap/delete/import_test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,0,0] >> KqpOlapJson::FilterVariants[2,true,1024,10,1000000,0.5] |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ydb-tests-olap >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,1000000,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,1000000,0] >> TResourcePoolTestReboots::ParallelCreateDrop [GOOD] |50.1%| [TS] {RESULT} ydb/tests/stress/cdc/tests/flake8 |50.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,1000,0,0.5] |50.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |50.1%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |50.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |50.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,100,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,0,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,0,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,100,0] |50.1%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test >> KqpOlapJson::FilterVariantsCount[1,true,1,10,100,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,100,0.5] >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,100,0.5] >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,1000000,0.5] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,1000000,0.5] >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,100,0] |50.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 62308, MsgBus: 12813 2025-10-24T16:21:36.835025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824239098913736:2140];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:36.835095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00137b/r3tmp/tmpuGpol7/pdisk_1.dat 2025-10-24T16:21:36.875018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:36.875099Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:36.875255Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824239098913634:2081] 1761322896834405 != 1761322896834408 TServer::EnableGrpc on GrpcPort 62308, node 1 2025-10-24T16:21:36.883809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:36.883821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:36.883822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:36.883880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12813 TClient is connected to server localhost:12813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:36.944720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:36.944751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:36.945743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:36.946185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:37.033390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:37.190843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824243393881610:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.190868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.190918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824243393881620:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.190935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:37.241114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:37.250412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:37.250456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:37.250497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:37.250519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:37.250536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:37.250555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:37.250574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:37.250591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:37.250615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:37.250638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:37.250658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:37.250674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:37.250692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824243393881673:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:37.251211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:37.251229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:37.251243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:37.251248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:37.251267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:37.251272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:37.251283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:37.251294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:37.251302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:37.251308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:37.251318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.849821Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824278427757850:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.849828Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.851377Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.854768Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:21:45.863743Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824278427757880:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.863776Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.863876Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824278427757885:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.863882Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824278427757886:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.863899Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.864685Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:45.867644Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-10-24T16:21:45.867672Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824278427757889:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:21:45.946657Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824278427757940:2416] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:45.969720Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:45.978491Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.985840Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715664;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:45.992915Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.999683Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, 
JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:21:46.023436Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:21:46.045457Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:46.422669Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:21:46.842189Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824278427757775:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:46.842206Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824278427757775:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:46.842347Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824278427757775:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905888;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:46.842357Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824278427757775:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905902;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:46.842435Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824278427757775:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322906035;tx_id=281474976715664;;new_schema=Id: 0 SinceStep: 1761322906035 SinceTxId: 281474976715664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] 
OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::RestoreFullJsonVariants[1,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1,10,0,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,0,0.5] >> KqpOlapJson::FilterVariants[2,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1024,1000,0,0] |50.2%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest >> TResourcePoolTestReboots::ParallelCreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:21:31.019725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:31.019744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:31.019748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:31.019754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:31.019759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:31.019762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:31.019769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:31.019784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:31.019907Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:31.019953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:31.040114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:21:31.040150Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:31.040252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:21:31.043282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:31.043366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:31.043421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:31.045969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:31.046093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:31.046201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:31.046275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:31.047130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:31.047186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:31.047412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:31.047422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:31.047446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:31.047454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:31.047461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:31.047487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 
2025-10-24T16:21:31.048786Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:21:31.071256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:31.071344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:31.071415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:31.071422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:31.071461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:31.071475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:31.072441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:31.072492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:31.072548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:31.072557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:31.072560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:31.072565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:31.072940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:31.072951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:31.072957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:31.073389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:31.073397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:31.073401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:31.073407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:31.074130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:31.074494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:31.074607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:31.074828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:31.074849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
046678944 2025-10-24T16:21:49.992203Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-10-24T16:21:49.992225Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:21:49.992235Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-10-24T16:21:49.992258Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:49.992263Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [75:212:2212], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-10-24T16:21:49.992268Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [75:212:2212], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2025-10-24T16:21:49.992272Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [75:212:2212], at schemeshard: 72057594046678944, txId: 1005, path id: 6 2025-10-24T16:21:49.992283Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-10-24T16:21:49.992290Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-10-24T16:21:49.992303Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1005:0 progress is 1/1 2025-10-24T16:21:49.992307Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-10-24T16:21:49.992312Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1005:0 progress is 1/1 2025-10-24T16:21:49.992316Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-10-24T16:21:49.992321Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-10-24T16:21:49.992329Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-10-24T16:21:49.992334Z node 75 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1005:0 2025-10-24T16:21:49.992338Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1005:0 2025-10-24T16:21:49.992350Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-10-24T16:21:49.992355Z node 75 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1005, publications: 3, 
subscribers: 0 2025-10-24T16:21:49.992359Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2025-10-24T16:21:49.992363Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 5], 7 2025-10-24T16:21:49.992366Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-10-24T16:21:49.992515Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.992528Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.992533Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:21:49.992538Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-10-24T16:21:49.992543Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-10-24T16:21:49.992607Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:21:49.992612Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-10-24T16:21:49.992620Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:21:49.992680Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.992688Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.992692Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:21:49.992699Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2025-10-24T16:21:49.992703Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:21:49.992936Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.992951Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.992956Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:21:49.992960Z node 75 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 7 2025-10-24T16:21:49.992964Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-10-24T16:21:49.992973Z node 75 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-10-24T16:21:49.993359Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.993395Z node 75 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:21:49.993418Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:21:49.993579Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-10-24T16:21:49.993621Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-10-24T16:21:49.993626Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-10-24T16:21:49.993677Z node 75 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-10-24T16:21:49.993695Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-10-24T16:21:49.993700Z node 75 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [75:411:2400] TestWaitNotification: OK eventTxId 1005 2025-10-24T16:21:49.993765Z node 75 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:21:49.993798Z node 75 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/DropMe" took 41us result status StatusPathDoesNotExist 2025-10-24T16:21:49.993843Z node 75 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.metadata/workload_manager/pools/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "pools" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |50.2%| [TM] {RESULT} ydb/library/yql/dq/actors/compute/ut/unittest >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,1000000,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,1000,0,0.5] [GOOD] >> KqpBoolColumnShard::TestBoolAsPrimaryKey-EXECUTE_QUERY-CSV [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts |50.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/simple_queue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::EmptyVariants[1,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 19616, MsgBus: 12669 2025-10-24T16:21:34.544298Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824230111357440:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:34.544324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001386/r3tmp/tmpIKkUNq/pdisk_1.dat 2025-10-24T16:21:34.590376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:34.607027Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19616, node 1 2025-10-24T16:21:34.618631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2025-10-24T16:21:34.618643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:34.618645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:34.618692Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12669 2025-10-24T16:21:34.646585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:34.646616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:34.647666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:34.665823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:34.877043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:34.886477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824230111358066:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.886506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.886540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824230111358076:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.886544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:34.927074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:34.937930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:34.937989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:34.938040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:34.938067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:34.938092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:34.938112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:34.938137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:34.938160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:34.938182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:34.938209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:34.938234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:34.938262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:34.938288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824230111358129:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:34.938921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:34.938937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:34.938952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:34.938961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:34.938982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:34.938991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:34.939003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:34.939013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:34.939021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:34.939031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:34.939038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:21:34.939047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_ ... led to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.775561Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.783564Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:45.788496Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278843799954:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.788508Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.788624Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278843799956:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.788632Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.792186Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:45.796636Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (1u), (2u), (3u), (4u) 2025-10-24T16:21:45.801299Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278843799987:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.801312Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278843799992:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.801318Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.801452Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824278843799995:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.801464Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.802315Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:45.811829Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824278843799994:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:21:45.872192Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824278843800047:2461] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:45.892043Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (11u), (12u), (13u), (14u) 2025-10-24T16:21:45.918206Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:21:46.076031Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:46.752210Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:46.752227Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:46.752233Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:46.752423Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905804;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:46.752435Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905818;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:46.752443Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322905832;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:21:46.752605Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824278843799818:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322905846;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322905846 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] OUTPUT: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[[""]];[[""]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Json_Exists [GOOD] Test command err: Trying to start YDB, gRPC: 13271, MsgBus: 22719 2025-10-24T16:21:43.460184Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824269601774600:2145];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:43.460275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001369/r3tmp/tmpSzWxPJ/pdisk_1.dat 2025-10-24T16:21:43.501500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:43.522859Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:43.523049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824269601774484:2081] 1761322903459595 != 1761322903459598 TServer::EnableGrpc on GrpcPort 13271, node 1 2025-10-24T16:21:43.530192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:43.530203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-10-24T16:21:43.530204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:43.530232Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22719 2025-10-24T16:21:43.563661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:43.563705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:43.564796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:43.581726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:21:43.592641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:43.616139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:43.616275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:43.616382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:43.616412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:43.616439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:43.616521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:43.616606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:43.616633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:43.616658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:43.616725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:43.616752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:43.616815Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:43.616847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824269601775160:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:43.626688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:43.626748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:43.626792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:43.626820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:43.626846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:43.626873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:43.626900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:43.626926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:43.626950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:43.626974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:43.627000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:43.627024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:43.627096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824269601775161:2302];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:43.628371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:43.628441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Ch ... _internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.022828Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.022842Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.022845Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.023729Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.023745Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.023748Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.024501Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.024518Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.024519Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; 2025-10-24T16:21:49.136452Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:49.242729Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824293306654644:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.242752Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824293306654625:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.242770Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.243517Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:49.245496Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824293306654654:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:21:49.341242Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824293306654715:2463] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:49.402517Z node 7 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322909297, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT id, JSON_EXISTS(jsonval, "$.col1"), JSON_EXISTS(jsondoc, "$.col1") FROM `/Root/tableWithNulls` WHERE JSON_EXISTS(jsonval, "$.col1") AND level = 1; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"E-Size":"0","Name":"Filter","Predicate":"level == 1","Blocks":"True","Inputs":[{"InternalOperatorId":1}],"E-Rows":"0","Pushdown":"True","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"0","Table":"tableWithNulls","ReadColumns":["id (-∞, +∞)","jsondoc","jsonval","level"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonExists","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":14}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":13},{"Id":14}]},"Column":{"Id":15}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":11},{"Id":15}]},"Column":{"Id":16}}},{"Filter":{"Predicate":{"Id":16}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"0"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","jsondoc","jsonval","level"],"scan_by":["id (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"tableWithNulls","ReadColumns":["id (-∞, 
+∞)","jsondoc","jsonval","level"],"SsaProgram":{"Command":[{"Assign":{"Constant":{"Text":"$.col1"},"Column":{"Id":7}}},{"Assign":{"Function":{"KernelName":"JsonExists","KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":5},{"Id":7}]},"Column":{"Id":8}}},{"Assign":{"Function":{"Id":23,"Arguments":[{"Id":8}]},"Column":{"Id":9}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":10}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":9},{"Id":10}]},"Column":{"Id":11}}},{"Assign":{"Constant":{"Int32":1},"Column":{"Id":12}}},{"Assign":{"Function":{"YqlOperationId":11,"KernelIdx":2,"FunctionType":2,"Arguments":[{"Id":3},{"Id":12}]},"Column":{"Id":13}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":14}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":3,"FunctionType":2,"Arguments":[{"Id":13},{"Id":14}]},"Column":{"Id":15}}},{"Assign":{"Function":{"YqlOperationId":0,"KernelIdx":4,"FunctionType":2,"Arguments":[{"Id":11},{"Id":15}]},"Column":{"Id":16}}},{"Filter":{"Predicate":{"Id":16}}},{"Projection":{"Columns":[{"Id":1},{"Id":6},{"Id":5}]}}]},"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Size":"0","Name":"Filter","Predicate":"level == 1","Blocks":"True","E-Rows":"0","Pushdown":"True","E-Cost":"0"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (let $1 (DataType 'Bool)) (let $2 (OptionalType $1)) (let $3 (DqPhyStage '() (lambda '() (block '( (let $8 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $9 '('"id" '"jsondoc" '"jsonval" '"level")) (let $10 (Utf8 '"$.col1")) (let $11 (Bool 'false)) (let $12 (KqpBlockReadOlapTableRanges $8 (Void) $9 '() '() (lambda '($13) (block '( (let $14 '('?? (KqpOlapJsonExists '"jsonval" $10) $11 $1)) (let $15 '('eq '"level" (Int32 '1) $2)) (let $16 '('?? 
$15 $11 $1)) (let $17 '('"id" '"jsondoc" '"jsonval")) (return (TKqpOlapExtractMembers (KqpOlapFilter $13 (KqpOlapAnd $14 $16)) $17)) ))))) (return (FromFlow (NarrowMap (ToFlow (WideFromBlocks (FromFlow $12))) (lambda '($18 $19 $20) (block '( (let $21 '($2)) (let $22 (ResourceType '"JsonNode")) (let $23 (OptionalType $22)) (let $24 '((ResourceType '"JsonPath"))) (let $25 (DataType 'Utf8)) (let $26 (DictType $25 $22)) (let $27 '($26)) (let $28 (CallableType '() $21 '($23) $24 $27 $21)) (let $29 '('('"strict"))) (let $30 (Udf '"Json2.SqlExists" (Void) (VoidType) '"" $28 (VoidType) '"" $29)) (let $31 (IfPresent $20 (lambda '($41) (block '( (let $42 '((DataType 'Json) '"" '1)) (let $43 (CallableType '() '($22) $42)) (let $44 (Udf '"Json2.Parse" (Void) (VoidType) '"" $43 (VoidType) '"" '())) (return (Just (Apply $44 $41))) ))) (Nothing $23))) (let $32 (CallableType '() $24 '($25))) (let $33 (Udf '"Json2.CompilePath" (Void) (VoidType) '"" $32 (VoidType) '"" '())) (let $34 (Apply $33 $10)) (let $35 (Dict $26)) (let $36 (Just $11)) (let $37 (Apply $30 $31 $34 $35 $36)) (let $38 (CallableType '() $21 '((OptionalType (DataType 'JsonDocument))) $24 $27 $21)) (let $39 (Udf '"Json2.JsonDocumentSqlExists" (Void) (VoidType) '"" $38 (VoidType) '"" $29)) (let $40 (Apply $39 $19 $34 $35 $36)) (return (AsStruct '('"column1" $37) '('"column2" $40) '('"id" $18))) )))))) ))) '('('"_logical_id" '951) '('"_id" '"9b7b6cf5-b2f9f33e-fa17e845-5f450d57")))) (let $4 (DqCnUnionAll (TDqOutput $3 '"0"))) (let $5 (DqPhyStage '($4) (lambda '($45) $45) '('('"_logical_id" '1181) '('"_id" '"f811aad9-91acebe0-6327d91a-3dc03fc4")))) (let $6 '('"id" '"column1" '"column2")) (let $7 (DqCnResult (TDqOutput $5 '"0") $6)) (return (KqpPhysicalQuery '((KqpPhysicalTx '($3 $5) '($7) '() '('('"type" '"scan")))) '((KqpTxResultBinding (ListType (StructType '('"column1" $2) '('"column2" $2) '('"id" (DataType 'Int32)))) '"0" '"0")) '('('"type" '"scan_query")))) ) >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,1000000,0] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[1,false,0,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 61516, MsgBus: 13522 2025-10-24T16:21:41.819927Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824258338910126:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:41.819955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001278/r3tmp/tmpotaozi/pdisk_1.dat 2025-10-24T16:21:41.864882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:41.874929Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:41.875115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824258338910101:2081] 1761322901819790 != 1761322901819793 TServer::EnableGrpc on GrpcPort 61516, node 1 2025-10-24T16:21:41.882605Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:41.882622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:41.882624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:41.882663Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13522 TClient is connected to server localhost:13522 2025-10-24T16:21:41.924083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:41.924112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:41.925113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:21:41.940457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:41.945186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:42.131519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:42.180003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824262633878077:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.180035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.180129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824262633878087:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.180147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.210813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:42.221919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.221971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.222007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.222024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.222041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.222058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.222075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.222092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.222107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.222122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:42.222137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:42.222159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:42.222174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824262633878148:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:42.222555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:42.222566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:42.222575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:42.222578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:42.222589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:42.222592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:42.222598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:42.222606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:42.222611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:42.222617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normali ... 
0257Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:47.868615Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:47.872409Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824286825041061:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.872431Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.872473Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824286825041063:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.872487Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.875180Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:47.882999Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:21:47.887003Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824286825041094:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.887028Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.887032Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824286825041099:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.887080Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824286825041101:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.887090Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.887644Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:47.889957Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824286825041102:2382], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:21:47.985577Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824286825041154:2459] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:48.001983Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:21:48.022451Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:21:48.039826Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:48.460056Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:21:48.841540Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:48.841561Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:48.841568Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:48.841707Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322907890;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:48.841727Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322907904;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:48.841732Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322907918;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:21:48.841840Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824286825040902:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322907932;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322907932 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[2,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 10896, MsgBus: 17869 2025-10-24T16:21:41.034049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824258198148469:2149];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:41.034076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001279/r3tmp/tmpDtynvg/pdisk_1.dat 2025-10-24T16:21:41.098669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:41.107443Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:41.107622Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824258198148348:2081] 1761322901033132 != 1761322901033135 TServer::EnableGrpc on GrpcPort 10896, node 1 2025-10-24T16:21:41.125868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:41.125879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2025-10-24T16:21:41.125886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:41.125926Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:41.137085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:41.137121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:41.138263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17869 TClient is connected to server localhost:17869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:21:41.201874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:41.359016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:41.603178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824258198149023:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:41.603210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:41.603312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824258198149033:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:41.603320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:41.651628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:41.666222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:41.666278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:41.666311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:41.666328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:41.666345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:41.666363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:41.666383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:41.666404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:41.666420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:41.666437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:41.666454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:41.666471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:41.666492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824258198149094:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:41.666920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:41.666930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:41.666938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824258198149096:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:41.666944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:41.666949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:41.666975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:41.666988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824258198149096:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:41.666988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:41.666998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:41.667000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:41.667005Z node 1 :TX_COLUMNSHARD ... 
ie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:47.850279Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:47.850298Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:47.850298Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:47.850301Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:47.850308Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:47.850310Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:47.856308Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437025:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.856329Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.856370Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437027:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.856378Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.858750Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:47.861896Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:47.862095Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:47.866361Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437061:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.866379Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.866414Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437063:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.866425Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.869182Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:47.875822Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:47.876089Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:21:47.880518Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437098:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.880547Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.880612Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437103:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.880627Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824285535437104:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.880640Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.881252Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:47.883299Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824285535437107:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:47.958598Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824285535437158:2462] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[["{\"others\":{\"accessor\":[1,1,1],\"size\":[4,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[2,1,1]},\"columns\":{\"accessor\":[1],\"size\":[4],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[2]},\"o_size\":656,\"h_size\":760,\"c_size\":272}"]];[["{\"others\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"a.b.c\\\"\"],\"records\":[1]},\"columns\":{\"accessor\":[1],\"size\":[11],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[1]},\"o_size\":624,\"h_size\":720,\"c_size\":248}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.2%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |50.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,0,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,0,0] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,100,0.5] |50.2%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} >> KqpOlapJson::FilterVariantsCount[1,true,1,10,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,100,0] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,100,0] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,1000000,0.5] >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,0,0] >> KqpOlapJson::SimpleVariants[2,false,1,0,0,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,0,100,0] >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,0,1000000,0.5] [GOOD] |50.2%| [TS] {RESULT} ydb/tests/stress/topic/tests/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,10,0,0.5] Test command err: Trying to start YDB, gRPC: 23390, MsgBus: 4846 2025-10-24T16:21:43.218944Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824267916047228:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:43.218982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00136b/r3tmp/tmpmG3Yu1/pdisk_1.dat 2025-10-24T16:21:43.315824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:43.316691Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824267916047197:2081] 1761322903218701 != 1761322903218704 2025-10-24T16:21:43.321588Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23390, node 1 2025-10-24T16:21:43.323036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:43.323068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:43.324185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:43.340665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:43.340675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:43.340677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:21:43.340711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4846 TClient is connected to server localhost:4846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:43.400319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:43.567387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:43.798569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824267916047874:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.798595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.798671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824267916047884:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.798681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.838416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:43.853865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:43.853906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:43.853934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:43.853950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:43.853966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:43.853983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:43.854001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:43.854020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:43.854035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:43.854051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:43.854068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:43.854084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:43.854100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824267916048023:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:43.854466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:43.854477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:43.854486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:43.854488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:43.854501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:43.854504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:43.854511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:43.854513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:43.854518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:43.854521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:43.854524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622403789 ... 
eaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.990571Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.990769Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.990783Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.990966Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.990977Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" 
TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.991108Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.991146Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.991262Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:49.991319Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } 
DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:21:49.996296Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824292388564748:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.996317Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.996320Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824292388564753:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.996359Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824292388564756:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.996370Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.996815Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:50.004564Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824292388564755:2397], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:50.102452Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824296683532104:2686] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:6/0/0 HEADER:0/0/0 |50.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,0,0.5] [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> KqpOlapJson::FilterVariantsCount[10,true,0,1000,100,0] >> KqpScheme::CleanupTemporaryTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[2,true,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 11659, MsgBus: 20591 2025-10-24T16:21:44.117960Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824270583447035:2081];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:44.121523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:44.130098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001362/r3tmp/tmp1dixvR/pdisk_1.dat 2025-10-24T16:21:44.187682Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11659, node 1 2025-10-24T16:21:44.265786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:44.265797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:44.265799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:44.265831Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:44.272353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:44.272374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:44.273220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:44.363403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20591 TClient is connected to server localhost:20591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:44.486257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:45.127595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:45.201343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824274878414970:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.201369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.201489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824274878414979:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.201500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.488613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:45.522928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:45.523129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:45.523178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:45.523202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:45.523302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:45.523326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:45.523444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:45.523466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:45.523868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:45.524007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:45.524029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:45.524051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:45.524073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824274878415046:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:45.527162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:45.527180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:45.527197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:45.527203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:45.527225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:45.527231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:45.527241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:45.527248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:45.527256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:45.527262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:45.527268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=nor ... 
tx_id=281474976715658;this=21371723630480;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322910580;max=18446744073709551615;plan=0;src=[6:7564824296338927697:2142];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.582689Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.582706Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.582709Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.583356Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.583367Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.583369Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:50.593552Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928189:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.593580Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.593649Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928192:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.593661Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.595492Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:50.597535Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:50.597535Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:50.601780Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928225:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.601798Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.601850Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928227:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.601856Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.604639Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:50.606755Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:50.606756Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:50.610934Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928262:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.610963Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.611044Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928267:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.611058Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824296338928268:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.611074Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.611754Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:50.620365Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824296338928271:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:21:50.710619Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824296338928322:2466] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |50.2%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |50.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |50.2%| [TS] {RESULT} ydb/tests/functional/rename/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpBoolColumnShard::TestBoolAsPrimaryKey-EXECUTE_QUERY-CSV [GOOD] Test command err: Trying to start YDB, gRPC: 29243, MsgBus: 16131 2025-10-24T16:21:35.014099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824234261376592:2143];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:35.014144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:35.016166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001384/r3tmp/tmpWACNbY/pdisk_1.dat 2025-10-24T16:21:35.030524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:35.030547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:35.031638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:35.036332Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:35.036470Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824234261376486:2081] 1761322895013544 != 1761322895013547 TServer::EnableGrpc on GrpcPort 29243, node 1 2025-10-24T16:21:35.046567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:35.046574Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:35.046575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:35.046604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16131 TClient is connected to server localhost:16131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:35.111174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:35.113217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, b Bool, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:35.355653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824234261377161:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.355685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.355763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824234261377171:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.355771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:35.390816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:35.399631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:35.399684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:35.399717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:35.399742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:35.399769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:35.399786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:35.399804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:35.399825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:35.399866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:35.399888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:35.399906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:35.399923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:35.399941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824234261377224:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:35.400389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:35.400405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:35.400424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:35.400436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:35.400463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:35.400478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:35.400490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:35.400497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:35.400505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:35.400512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:35.400517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp ... 
t=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:50.700703Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:50.700725Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:50.700743Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:50.700760Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:50.700779Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:50.700796Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:50.700814Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:50.700830Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:50.701261Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:50.701275Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:50.701285Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:50.701288Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:50.701300Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:50.701303Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:50.701310Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:50.701314Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:50.701319Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:50.701323Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:50.701329Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:21:50.701331Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:21:50.701347Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:21:50.701351Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:21:50.701359Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:50.701369Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:50.701373Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:50.701376Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:50.701381Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:50.701384Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:50.701392Z node 13 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:50.701396Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:50.701403Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:50.701406Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:50.702961Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[13:7564824299807042267:2326];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=56445437889312;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322910702;max=18446744073709551615;plan=0;src=[13:7564824299807041915:2160];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.704734Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.704752Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.704755Z node 13 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:50.713148Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564824299807042346:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.713171Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564824299807042351:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.713174Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.713218Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564824299807042354:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.713226Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.713756Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:50.718731Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7564824299807042353:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:21:50.785125Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7564824299807042406:2399] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |50.2%| [TS] {RESULT} ydb/tests/fq/streaming/import_test |50.2%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test |50.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |50.3%| [TS] {RESULT} ydb/tests/functional/tpc/medium/tpch/flake8 >> KqpOlapJson::BrokenJsonWriting[10,true,1024,0,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,100,0] |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |50.3%| [TS] {RESULT} ydb/tests/functional/secrets/import_test |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |50.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,1000000,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,1000,0,0] >> KqpOlapJson::FilterVariants[2,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1024,1000,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,10,0,0.5] |50.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |50.3%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,1000000,0.5] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,1000000,0] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,0,100,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,0,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24840, MsgBus: 7425 2025-10-24T16:21:45.317066Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824278215078171:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:45.317194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00135a/r3tmp/tmp3NFC4X/pdisk_1.dat 2025-10-24T16:21:45.440721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:45.441776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:45.441816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:45.442686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:45.452111Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24840, node 1 2025-10-24T16:21:45.556129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:45.556141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:45.556143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:45.556186Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:45.608784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to 
server localhost:7425 TClient is connected to server localhost:7425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:45.769318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:46.323174Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:46.611427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282510046101:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.611468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.611592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282510046110:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.611615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.742660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:46.789223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.789927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:46.790064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.790147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.790166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.790320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.790341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.790355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:46.790379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:46.790463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:46.790483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:46.790500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:46.790618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282510046166:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:46.792925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:46.792945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:46.792959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:46.792964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:46.792983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:46.792988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:46.793000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:46.793011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:46.793018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:46.793023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:46.793036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstrac ... 
eprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:51.282072Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:51.282082Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:51.282094Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:51.282099Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:51.282116Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:51.282121Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:51.282133Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:51.282139Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:51.283539Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824303762839205:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=18960036038032;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322911283;max=18446744073709551615;plan=0;src=[6:7564824299467871540:2150];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.285872Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.285889Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.285892Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:51.296048Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839276:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.296071Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.296198Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839279:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.296227Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.296601Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:51.300436Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:51.309222Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839308:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.309241Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.309338Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839310:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.309347Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.313977Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:51.318009Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:51.330118Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839341:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.330140Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.330267Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839347:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.330287Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824303762839346:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.330293Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.331259Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:51.333689Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824303762839350:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:51.397413Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824303762839401:2439] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1024,0,0,0.5] >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,10,0,0.5] [GOOD] |50.3%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::CleanupTemporaryTables [GOOD] Test command err: Trying to start YDB, gRPC: 19639, MsgBus: 30017 2025-10-24T16:21:32.557514Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824219729418563:2157];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:32.557557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0008bc/r3tmp/tmpJeyqUC/pdisk_1.dat 2025-10-24T16:21:32.602539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:32.606575Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19639, node 1 2025-10-24T16:21:32.616850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:32.616861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:32.616862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:32.616891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30017 2025-10-24T16:21:32.659789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:32.659844Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:32.660838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:32.685555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:32.696916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.716969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.740874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:32.753639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:21:32.855458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:32.905096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824219729420081:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.905116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.905174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824219729420091:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.905181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:32.950044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.958028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.966930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.974156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:32.988229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:33.002621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:33.016701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:33.030965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:33.047414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824224024388250:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.047444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824224024388255:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.047447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.047495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824224024388258:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.047506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:33.048115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:39.546704Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:39.556376Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:39.566499Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:39.584771Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:21:39.597990Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:21:39.676359Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:39.837741Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824251363089367:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.837792Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.837959Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824251363089377:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.837983Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.852401Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.860672Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.869921Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.883761Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.897173Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.911831Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.925769Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.939895Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:21:39.957036Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[8:7564824251363090239:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.957053Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824251363090244:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.957058Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.957129Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824251363090247:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.957142Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:39.957747Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:39.966480Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7564824251363090246:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:21:40.040488Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7564824255658057596:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:40.473715Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:44.472924Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7564824251363087851:2170];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:44.472979Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:21:45.263071Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__background_cleaning.cpp:196: Out of txIds for temp dir# /Root/first_dir. Background cleaning will be finished later. 2025-10-24T16:21:46.263641Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,0,0.5] >> KqpOlapJson::CompactionVariants[2,false,1,10,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1,10,0,0.5] |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |50.3%| [TS] {RESULT} ydb/tests/datashard/partitioning/flake8 >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,100,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,0,1000,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,1024,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 25963, MsgBus: 17167 2025-10-24T16:21:44.897978Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824273912858240:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:44.903028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001361/r3tmp/tmpGjXzB3/pdisk_1.dat 2025-10-24T16:21:45.057631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:45.057812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:45.060223Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:45.062987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25963, node 1 2025-10-24T16:21:45.119895Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:45.172048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:45.172062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:45.172064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:45.172106Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17167 2025-10-24T16:21:45.263181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:45.322324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:45.903944Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:46.706145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282502793461:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.707210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.707380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282502793471:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.707389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.707493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282502793473:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.707499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.925365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:46.972732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.973019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:46.973320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.973350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.973372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.973486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.973496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.973518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:46.973525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.973568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.973597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.973618Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.973637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.973648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:46.973659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.973671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824282502793551:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:46.973680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:46.973699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:46.973719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:46.973742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824282502793537:2326];tablet_id=72075186224 ... 
8;max=18446744073709551615;plan=0;src=[7:7564824302027272544:2145];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.778887Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824302027272952:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=91320583077744;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322911778;max=18446744073709551615;plan=0;src=[7:7564824302027272544:2145];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.781234Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.781249Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.781251Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.781729Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.781746Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:51.781749Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:51.793359Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273054:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.793377Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.793463Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273057:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.793469Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.795271Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:51.798089Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.798272Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:51.808182Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273090:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.808219Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.808324Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273093:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.808335Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.809284Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:51.811213Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.811277Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:21:51.815928Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273127:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.815956Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.815959Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273132:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.815993Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824302027273134:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.816007Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.816628Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:51.824457Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824302027273135:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:51.897841Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824302027273187:2468] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:4/0/0 HEADER:0/0/0 |50.3%| [TS] {RESULT} ydb/tests/stress/node_broker/tests/flake8 |50.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |50.3%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,1000000,0] >> KqpOlapJson::FilterVariantsCount[10,true,0,1000,1000000,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,0,0,0.5] >> KqpOlapJson::CompactionVariants[1,true,1,0,100,0] >> KqpOlapJson::CompactionVariants[10,true,1024,0,100,0] >> KqpOlapJson::DoubleFilterVariants[10,true,0,0,0,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,0,0] >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,1000000,0.5] [GOOD] |50.3%| [TS] {RESULT} ydb/tests/stress/reconfig_state_storage_workload/tests/import_test |50.3%| [TS] {RESULT} ydb/tests/fq/yds/import_test |50.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> KqpOlapJson::SimpleExistsVariants[10,false,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,0,0,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,0,0.5] >> KqpOlapJson::FilterVariants[2,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1024,1000,100,0] |50.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |50.3%| [TS] {RESULT} ydb/tests/functional/tpcc/import_test |50.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |50.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,1000,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,100,0.5] >> KqpOlapJson::SimpleVariants[2,false,1,0,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,100,0] >> KqpOlapJson::SimpleVariants[2,false,1,0,1000000,0] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,1000000,0.5] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,0,0.5] [GOOD] |50.4%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,100,0] |50.4%| [TS] {RESULT} ydb/tests/stress/s3_backups/tests/flake8 >> KqpOlapJson::CompactionVariants[1,false,1,0,0,0] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,0,0] [GOOD] |50.4%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,100,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,100,0] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,100,0] |50.4%| [TS] {RESULT} ydb/tests/stress/transfer/tests/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2025-10-24T16:21:23.356375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824180607604299:2220];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:23.356423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b83/r3tmp/tmpbulAHi/pdisk_1.dat 2025-10-24T16:21:23.399969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:23.413260Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62092, node 1 2025-10-24T16:21:23.423961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:23.423972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:23.423974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:23.424016Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:23.458275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:23.458318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:23.459876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:23.465026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:23.491516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:3460 2025-10-24T16:21:23.504663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-10-24T16:21:23.647728Z node 1 :PERSQUEUE ERROR: partition_read.cpp:823: [72075186224037888][Partition][0][StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2025-10-24T16:21:23.647741Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'stream_TestGetRecordsStreamWithSingleShard' partition: 0 messageNo: 0 requestId: error: trying to read from future. 
ReadOffset 100000, 0 EndOffset 30 2025-10-24T16:21:23.683041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:24.357532Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:25.704698Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7564824192972910882:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:25.706220Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b83/r3tmp/tmplojohH/pdisk_1.dat 2025-10-24T16:21:25.712415Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:25.740657Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4340, node 4 2025-10-24T16:21:25.764353Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:25.764360Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:25.764361Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:25.764394Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:25.807287Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:25.807319Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:25.808880Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:25.810612Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:25.844673Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:6745 2025-10-24T16:21:25.858921Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-10-24T16:21:25.876943Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:26.704610Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:30.704858Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7564824192972910882:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:30.704898Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:21:40.733377Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:21:40.733397Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:50.707423Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7564824300073398882:2081];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:50.707590Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b83/r3tmp/tmp2ZWv63/pdisk_1.dat 2025-10-24T16:21:50.716950Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:50.735027Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22103, node 7 2025-10-24T16:21:50.747695Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:50.747711Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:50.747713Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:50.747752Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:50.764294Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:50.782227Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:26315 2025-10-24T16:21:50.794893Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-10-24T16:21:50.809362Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:50.809394Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:50.810182Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:51.635887Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7564824304354120721:2145];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.635911Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:51.654776Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b83/r3tmp/tmpLvC2P3/pdisk_1.dat 2025-10-24T16:21:51.683672Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11011, node 10 2025-10-24T16:21:51.708197Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.708211Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.708213Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:51.708267Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7576 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:51.734399Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.734438Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.736079Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:51.756244Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:51.802197Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:7576 2025-10-24T16:21:51.820132Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-10-24T16:21:51.930241Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |50.4%| [TS] {RESULT} ydb/tests/functional/hive/import_test >> KqpOlapJson::FilterVariantsCount[10,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,0,1000,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,0,0,0.5] [GOOD] |50.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |50.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,0,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,1000,0,0.5] |50.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |50.4%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8 |50.4%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |50.4%| [TS] {RESULT} ydb/tests/stress/s3_backups/tests/import_test >> KqpOlapJson::FilterVariants[2,true,1024,1000,100,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1024,1000,100,0.5] |50.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |50.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,0,0.5] [GOOD] >> 
KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,1000000,0] >> KqpOlapJson::SimpleVariants[2,false,1,0,1000000,0] [GOOD] >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,0,1000000,0.5] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,1000000,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,0,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 14365, MsgBus: 25672 2025-10-24T16:21:44.113952Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824272909108122:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:44.114058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:44.124578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001364/r3tmp/tmplIrrxM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14365, node 1 2025-10-24T16:21:44.199894Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:44.232334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:44.232357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:44.233560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:44.241966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:44.241979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:44.241982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:44.242022Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:44.363937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25672 TClient is connected to server localhost:25672 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:44.463462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:45.119912Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:45.190328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824277204076072:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.190407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.190528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824277204076082:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.190537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:45.410967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:45.491155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:45.491249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:45.491330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:45.491352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:45.491385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:45.491408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:45.491429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:45.491450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:45.491471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:45.491495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:45.491516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:45.491539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:45.491560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824277204076211:2324];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:45.491682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:45.491693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:45.491728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:45.491742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:45.491753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:45.491765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:45.491776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:45.491787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:45.491798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824277204076212:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11; ... 
{ } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.840785Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.840975Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.840987Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.841189Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.841313Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" 
Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.841381Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.841499Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.841567Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:51.841709Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } 
DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:21:51.846177Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824301044408392:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.846198Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.846200Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824301044408397:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.846222Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824301044408399:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.846230Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.846852Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:51.852382Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824301044408400:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:51.945297Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824301044408452:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:6/0/0 HEADER:0/0/0 |50.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |50.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,1000,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,0,0] >> KqpOlapJson::DoubleFilterVariants[10,true,0,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,0,0,0,0.5] >> KqpOlapJson::FilterVariantsCount[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,100,0.5] >> KqpOlapJson::FilterVariantsCount[1,true,1,1000,0,0] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,10,0,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,100,0.5] |50.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |50.4%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,100,0.5] |50.5%| [TS] {RESULT} ydb/tests/datashard/select/import_test |50.5%| [TS] {RESULT} ydb/tests/example/flake8 |50.5%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,0,0,0] |50.5%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,0,0] [GOOD] |50.5%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |50.5%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,100,0] >> KqpOlapJson::CompactionVariants[2,false,1,10,0,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg_GroupByNullMix [GOOD] Test command err: Trying to start YDB, gRPC: 23271, MsgBus: 10186 2025-10-24T16:21:33.811465Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824223639523386:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:33.811492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f5d/r3tmp/tmp2qzbRw/pdisk_1.dat 2025-10-24T16:21:33.845808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:33.846147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824223639523261:2081] 1761322893810719 != 1761322893810722 2025-10-24T16:21:33.846296Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23271, node 1 2025-10-24T16:21:33.854952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:33.854963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:33.854964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:33.855020Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10186 TClient is connected to server localhost:10186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:33.913758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:33.913779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:33.914784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:33.920036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:21:33.930565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:33.941478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:33.941547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:33.941595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:33.941646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:33.941664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:33.941688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:33.941728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:33.941748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:33.941764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:33.941781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:33.941797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:33.941814Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:33.941837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824223639523936:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:33.942372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:33.942388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:33.942404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:33.942407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:33.942424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:33.942431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:33.942439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:33.942445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:33.942450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:33.942453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:33.942457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:21:33.942463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:21:33.942479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:21:33.942486Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:21:33.942495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:33.942501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:33.942510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:33.942516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstrac ... 4037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.699217Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.699219Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.700194Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.700209Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:49.700211Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; 2025-10-24T16:21:49.856571Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:49.920948Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824294211402287:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.920975Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.921015Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824294211402324:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.921060Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824294211402326:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.921063Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.921726Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:49.923497Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824294211402327:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:21:50.020779Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824298506369675:2465] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:53.654956Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:53.727493Z node 7 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322910000, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, AVG(id), AVG(level) FROM `/Root/tableWithNulls` WHERE id >= 5 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.level","Aggregation":"{[SUM(item.id),Inc()],[SUM(item.level),Inc()]}","Name":"Aggregate","Phase":"Intermediate"},{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"0","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id [5, +∞)","level"],"SsaProgram":{"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3}]}}]},"E-Cost":"0","ReadRangesExpectedSize":"1"}],"Node Type":"Aggregate-TableRangeScan"}],"HashFunc":"HashV2","Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.level","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [5, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id [5, +∞)","level"],"SsaProgram":{"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3}]}}]},"E-Cost":"0","ReadRangesExpectedSize":"1"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.level","Aggregation":"{[SUM(item.id),Inc()],[SUM(item.level),Inc()]}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"level\"], HashFunc: \"HashV2\")","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( (declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 (DataType 'Int32)) (let $2 (OptionalType $1)) (let $3 '('('"_logical_id" '2776) '('"_id" '"463928cd-255110f2-9b07921f-c1eaf609") '('"_partition_mode" '"single"))) (let $4 (DqPhyStage '() (lambda '() (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '('((Just (Int32 '"5")) (Int32 '1)) '((Nothing $2) (Int32 '0)))))))))))) $3)) (let $5 (DqCnValue (TDqOutput $4 '0))) (let $6 (KqpPhysicalTx '($4) '($5) '() '('('"type" '"compute")))) (let $7 '"%kqp%tx_result_binding_0_0") (let $8 (TupleType $2 $1)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (DataType 'Double)) (let $11 (TupleType $10 (DataType 'Uint64))) (let $12 (OptionalType $11)) (let $13 '('"level" $2)) (let $14 (StructType '('_yql_agg_0 $11) '('_yql_agg_1 $12) $13)) (let $15 '('('"_logical_id" '2835) '('"_id" '"1db6091d-ecf8b287-d2f8a014-ea54be58") '('"_wide_channels" $14))) (let $16 (DqPhyStage '() (lambda '() (block '( (let $29 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $30 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '0))) (let $31 (KqpBlockReadOlapTableRanges $29 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $30 (lambda '($37) $37))) (let $32 (lambda '($45) (Just '((Convert $45 'Double) (Uint64 '1))))) (let $33 (Nothing $12)) (let $34 (lambda '($40 $41 $42) (block '( (let $43 (Uint64 '1)) (let $44 (IfPresent $42 $32 $33)) (return (Convert $41 'Double) $43 $44) )))) (let $35 (lambda '($46 $47 $48 $49 $50 $51) (block '( (let $52 (IfPresent $48 $32 $33)) (let $53 (IfPresent $51 (lambda '($54) (IfPresent $48 (lambda '($55) (Just '((AggrAdd (Nth $54 '0) (Convert $55 'Double)) (Inc (Nth $54 '1))))) (Just $54))) $52)) (return (AggrAdd $49 (Convert $47 'Double)) (Inc $50) $53) )))) (let $36 (lambda '($56 $57 $58 $59) '($57 $58) $59 $56)) (return (DqPhyHashCombine (WideFromBlocks (FromFlow $31)) '-1073741824 (lambda '($38 $39) $39) $34 $35 $36)) ))) $15)) (let $17 (DqCnHashShuffle (TDqOutput $16 '0) '('2) '0 '"HashV2")) (let $18 (OptionalType $10)) (let $19 (StructType '('"column1" $10) '('"column2" $18) $13)) (let $20 '('('"_logical_id" '8455) '('"_id" '"65fb4ddf-802d4528-eaf8278a-d8430a74") '('"_wide_channels" $19))) (let $21 (DqPhyStage '($17) (lambda '($60) (block '( (let $61 (lambda '($68 $69 $70 $71) (Nth $69 '0) (Nth $69 '1) $70)) (let $62 (lambda '($72 $73 $74 $75 $76 $77 $78) (block '( (let $79 (IfPresent $74 (lambda '($80) (IfPresent $78 (lambda '($81) (Just '((AggrAdd (Nth $80 '0) (Nth $81 '0)) (AggrAdd (Nth $80 '1) (Nth $81 '1))))) $74)) $78)) (return (AggrAdd (Nth $73 '0) $76) (AggrAdd (Nth $73 '1) $77) $79) )))) (let $63 (lambda '($82 $83 $84 $85) (block '( (let $86 (IfPresent $85 (lambda '($87) (Just (Div (Nth $87 '0) (Nth $87 '1)))) (Nothing $18))) (return (Div $83 $84) $86 $82) )))) (let $64 (WideCombiner (ToFlow $60) '"" (lambda '($65 $66 $67) $67) $61 $62 $63)) (return (FromFlow (WideSort $64 '('('2 (Bool 'true)))))) ))) $20)) (let $22 (DqCnMerge (TDqOutput $21 '0) '('('2 '"Asc")))) (let $23 (DqPhyStage '($22) (lambda '($88) (FromFlow (NarrowMap (ToFlow $88) (lambda '($89 $90 $91) (AsStruct '('"column1" $89) '('"column2" $90) '('"level" $91)))))) '('('"_logical_id" '8467) '('"_id" 
'"50e30a8b-a571b522-f8b9240b-1c3dc682")))) (let $24 '($16 $21 $23)) (let $25 '('"level" '"column1" '"column2")) (let $26 (DqCnResult (TDqOutput $23 '0) $25)) (let $27 (KqpTxResultBinding $9 '0 '0)) (let $28 (KqpPhysicalTx $24 '($26) '('($7 $27)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($6 $28) '((KqpTxResultBinding (ListType $19) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[10,true,1,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 7959, MsgBus: 11462 2025-10-24T16:21:44.908222Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824274396658061:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:44.908250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00135c/r3tmp/tmpmb4JJx/pdisk_1.dat 2025-10-24T16:21:45.069585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:45.073990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:45.074017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:45.088462Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:45.089003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7959, node 1 2025-10-24T16:21:45.166356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:45.166369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:45.166371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:45.166446Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11462 TClient is connected to server localhost:11462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:45.252926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:45.255587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:45.354376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:45.904254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:46.112141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282986593089:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.112189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.112260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824282986593099:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.112265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.213075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:46.299112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.299215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:46.299288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.299307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.299323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.299343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.299363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.299383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:46.299403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:46.299419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:46.299438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:46.299454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:46.299477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824282986593244:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:46.300838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.300921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:46.300996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.301017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.301090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.301116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.301141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.301166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824282986593222:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:46 ... 
COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447196Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447303Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447316Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447423Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: 
"SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447536Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447559Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.447719Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:21:52.452208Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824305833190173:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.452227Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.452233Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824305833190178:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.452262Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824305833190180:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.452267Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.452833Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:52.461648Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824305833190181:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:52.532856Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824305833190233:2687] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:4/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[[""]];[[""]];[["{\"others\":{\"accessor\":[1,1],\"size\":[2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"c.d.e\\\"\"],\"records\":[1,1]},\"columns\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[1]},\"o_size\":632,\"h_size\":736,\"c_size\":240}"]];[["{\"others\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[1]},\"columns\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"d.e.f\\\"\"],\"records\":[1]},\"o_size\":624,\"h_size\":720,\"c_size\":240}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"a.b.c\\\"\"],\"records\":[1]},\"o_size\":0,\"h_size\":360,\"c_size\":240}"]];[["{\"others\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"a.b.c\\\"\"],\"records\":[1]},\"columns\":{\"accessor\":[1],\"size\":[11],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[1]},\"o_size\":624,\"h_size\":720,\"c_size\":248}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.5%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |50.5%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 >> KqpOlapJson::CompactionVariants[1,true,1,0,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,0,100,0.5] >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,1000000,0] |50.5%| [TS] {RESULT} ydb/tests/fq/common/flake8 |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/ydb-dstool >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,1000000,0] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,0,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,1000000,0] >> KqpOlapJson::CompactionVariants[10,true,1024,0,100,0.5] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,1000000,0.5] >> KqpOlapJson::FilterVariants[2,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[2,false,1,10,0,0.5] |50.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |50.5%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |50.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,0,0] [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,100,0] Test command err: Trying to start YDB, gRPC: 30211, MsgBus: 28563 2025-10-24T16:21:42.751542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824262938265754:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:42.751571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00136f/r3tmp/tmpvsvtNv/pdisk_1.dat 2025-10-24T16:21:42.856776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:42.859534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:42.859557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:42.860744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:42.873643Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30211, node 1 
2025-10-24T16:21:42.901969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:42.901979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:42.901980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:42.902040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28563 TClient is connected to server localhost:28563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:43.020005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:43.024560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:43.401966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824267233233597:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.401990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.402040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824267233233607:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.402051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:43.445430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:43.460996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:43.461041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:43.461048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:43.461078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:43.461083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:43.461098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:43.461114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:43.461116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:43.461131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:43.461134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:43.461148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:43.461152Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:43.461175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:43.461175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:43.461192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:43.461194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:43.461205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:43.461207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:43.461220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:43.461227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:43.461236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824267233233722:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:43.461248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824267233233721:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:43.461252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622403 ... 
ual_to_use;from=1;to=2; 2025-10-24T16:21:54.003604Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824308494132783:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:54.003607Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824308494132829:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:54.003667Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824308494132829:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913049;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:54.003670Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824308494132783:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913049;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:54.003679Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824308494132783:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913077;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:54.003679Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824308494132829:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913077;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:54.003715Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824308494132829:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322913210;tx_id=281474976715664;;new_schema=Id: 0 SinceStep: 1761322913210 SinceTxId: 281474976715664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:54.003715Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824308494132783:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322913210;tx_id=281474976715664;;new_schema=Id: 0 SinceStep: 1761322913210 SinceTxId: 281474976715664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } 
Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:54.003890Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:54.003899Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:54.003902Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:54.003979Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913049;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:54.003991Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913077;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:54.003999Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913210;tx_id=281474976715664;;is_diff=1;version=3; 2025-10-24T16:21:54.004041Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824308494132807:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322913224;tx_id=281474976715665;;new_schema=Id: 0 SinceStep: 1761322913224 SinceTxId: 281474976715665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 
LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:54.004239Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:54.004247Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:54.004250Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:54.004326Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913049;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:54.004340Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913077;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:54.004344Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913210;tx_id=281474976715664;;is_diff=1;version=3; 2025-10-24T16:21:54.004388Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824308494132808:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322913224;tx_id=281474976715665;;new_schema=Id: 0 SinceStep: 1761322913224 SinceTxId: 281474976715665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.5%| [TS] {RESULT} ydb/tests/stress/ctas/tests/import_test |50.5%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,0,0,0,0.5] [GOOD] |50.5%| [TS] {RESULT} ydb/tests/olap/scenario/import_test |50.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |50.5%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test |50.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,0,0] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatExecutorLeases::Basics >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] |50.6%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 61608, MsgBus: 10643 2025-10-24T16:21:47.615942Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824283696213936:2147];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:47.616022Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001357/r3tmp/tmpk3tHB8/pdisk_1.dat 2025-10-24T16:21:47.647283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:47.656853Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:47.657039Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824283696213818:2081] 
1761322907614967 != 1761322907614970 TServer::EnableGrpc on GrpcPort 61608, node 1 2025-10-24T16:21:47.667753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:47.667766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:47.667768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:47.667807Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10643 TClient is connected to server localhost:10643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:47.724387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:47.724416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:47.725285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:47.727392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:47.860098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:47.953588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824283696214499:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.953618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.953698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824283696214509:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.953712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.985540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:47.993902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:47.993966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:47.994024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:47.994049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:47.994062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:47.994080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:47.994099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:47.994113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:47.994132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:47.994150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:47.994167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:47.994186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:47.994203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824283696214562:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:47.994601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:47.994613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:47.994623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:47.994626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:47.994638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:47.994641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:47.994649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:47.994657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:47.994661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:47.994664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:47.994668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037 ... 
de 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:53.682413Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.682417Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:53.682424Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:53.682432Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:53.682438Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:53.682448Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:53.682454Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.682461Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:53.682467Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:53.684834Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824313107309710:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=55338428073632;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322913684;max=18446744073709551615;plan=0;src=[7:7564824313107309339:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:53.686789Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:53.686804Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:53.686806Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:53.694215Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309782:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.694230Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.694327Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309784:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.694337Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.697469Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:53.700631Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:53.705520Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309815:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.705544Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.705591Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309817:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.705600Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.709116Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:53.714045Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:21:53.718762Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309847:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.718788Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.718835Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309852:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.718847Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313107309853:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.718858Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.719644Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:53.728755Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824313107309856:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:53.812923Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824313107309907:2437] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 |50.6%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |50.6%| [TS] {RESULT} ydb/tests/fq/restarts/import_test |50.6%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> KqpOlapJson::FilterVariantsCount[1,true,1,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,1000,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,1000000,0] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,1000000,0.5] |50.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |50.6%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |50.6%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/import_test |50.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |50.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 >> KqpOlapJson::FilterVariantsCount[10,true,1,0,0,0] [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,0,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,1000000,0] [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> KqpOlapJson::SimpleExistsVariants[2,false,0,10,0,0.5] |50.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |50.6%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |50.6%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test >> KqpOlapJson::FilterVariantsCount[10,true,1,0,0,0.5] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,0,0.5,CATEGORY_BLOOM_FILTER] >> 
KqpOlapJson::BrokenJsonWriting[2,false,0,1000,1000000,0.5] |50.6%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |50.6%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test >> KqpOlapJson::CompactionVariants[1,false,1,0,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,0,1000000,0.5] [GOOD] >> BuildStatsHistogram::Ten_Serial >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,0,0.5] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,10,0,0.5] |50.6%| [TS] {RESULT} ydb/tests/datashard/ttl/flake8 |50.6%| [TS] {RESULT} ydb/tests/functional/secrets/flake8 |50.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |50.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |50.6%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |50.6%| [TS] {RESULT} ydb/tests/stress/transfer/tests/import_test |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load >> KqpOlapJson::SimpleVariants[2,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,0,0] [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,100,0] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables |50.7%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |50.7%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/flake8 |50.7%| [TS] {RESULT} ydb/tests/fq/streaming/flake8 |50.7%| [TS] {RESULT} ydb/tests/functional/tpcc/flake8 |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/ydb-tests-functional-security >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,1000000,0.5] [GOOD] >> TFlatExecutorLeases::Basics [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,100,0] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData >> TFlatExecutorLeases::BasicsLeaseTimeout >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,0,0] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] |50.7%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |50.7%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test |50.7%| [TS] {RESULT} 
ydb/tests/sql/large/flake8 |50.7%| [TS] {RESULT} ydb/tests/datashard/vector_index/medium/flake8 >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,0,0,1000000,0.5] [GOOD] >> DBase::Select [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,0,100,0.5] [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,1000000,0] >> KqpOlapJson::QuotedFilterVariants[1,false,1024,1000,1000000,0] >> DBase::Subsets [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,10,0,0] >> DBase::Garbage [GOOD] >> DBase::WideKey |50.7%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/import_test |50.7%| [TS] {RESULT} ydb/tests/functional/cms/import_test >> KqpOlapJson::CompactionVariants[1,true,1,0,1000000,0] |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 6733, MsgBus: 24019 2025-10-24T16:21:42.176365Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824262292104214:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:42.176505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001376/r3tmp/tmphktv0f/pdisk_1.dat 2025-10-24T16:21:42.205365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:42.218045Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6733, node 1 2025-10-24T16:21:42.226626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:42.226638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:42.226640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:42.226705Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24019 TClient is connected to server localhost:24019 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:42.278986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:42.279011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:42.280107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:42.285088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:42.443527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:42.671261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824262292104762:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.671305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.671422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824262292104772:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.671436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.834384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:42.875986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.876083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.876126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.876186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.876230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.876249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.876320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.876339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.876395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.876420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:42.876439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:42.876456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:42.876474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824262292104908:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:42.876625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.876650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.876693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.876718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.876742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.876771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.876790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.876813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.876836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824262292104907:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.876860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622403 ... 
og.cpp:841: tablet_id=72075186224037890;self_id=[6:7564824307847580268:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912776;tx_id=281474976715664;;is_diff=1;version=3; 2025-10-24T16:21:53.556389Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[6:7564824307847580268:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322912790;tx_id=281474976715665;;new_schema=Id: 0 SinceStep: 1761322912790 SinceTxId: 281474976715665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:53.556716Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:53.556725Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:53.556728Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:53.556803Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912622;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:53.556816Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912636;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:53.556821Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912776;tx_id=281474976715664;;is_diff=1;version=3; 2025-10-24T16:21:53.556866Z node 6 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564824307847580196:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322912790;tx_id=281474976715665;;new_schema=Id: 0 SinceStep: 1761322912790 SinceTxId: 281474976715665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:53.557390Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:53.557396Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:53.557398Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:53.557454Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912622;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:53.557460Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912636;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:53.557462Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912776;tx_id=281474976715664;;is_diff=1;version=3; 2025-10-24T16:21:53.557494Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824307847580165:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322912790;tx_id=281474976715665;;new_schema=Id: 0 SinceStep: 1761322912790 
SinceTxId: 281474976715665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:53.559613Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564824307847580205:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:53.559624Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564824307847580205:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:53.559708Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564824307847580205:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912622;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:53.559723Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564824307847580205:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322912636;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:53.559766Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564824307847580205:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322912776;tx_id=281474976715664;;new_schema=Id: 0 SinceStep: 1761322912776 SinceTxId: 281474976715664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/topic_it >> KqpOlapJson::SimpleExistsVariants[2,false,0,10,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,10,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,1000000,0] [GOOD] >> DBase::WideKey [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse >> KqpOlapJson::FilterVariantsCount[10,true,1,0,100,0] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,1000000,0.5] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem |50.7%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/with_direct_read/topic_direct_read_it |50.7%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |50.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/large/ydb-tests-olap-s3_import-large |50.7%| [TS] {RESULT} ydb/tests/stress/kv/tests/import_test |50.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> DBase::VersionPureMem [GOOD] |50.7%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |50.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreJsonArrayVariants[1,true,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 8165, MsgBus: 25848 2025-10-24T16:21:49.458304Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824294151100401:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:49.458330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001351/r3tmp/tmp909rRz/pdisk_1.dat 2025-10-24T16:21:49.497613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:49.510114Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:49.511892Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824294151100374:2081] 1761322909458058 != 1761322909458061 TServer::EnableGrpc on GrpcPort 8165, node 1 2025-10-24T16:21:49.518775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:49.518786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-10-24T16:21:49.518788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:49.518824Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25848 TClient is connected to server localhost:25848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:49.567215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:49.567242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:49.568009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:49.568355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:49.790048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:49.812987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824294151101054:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.813012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.813060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824294151101064:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.813069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.846852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:49.856892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:49.856947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:49.856997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:49.857026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:49.857051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:49.857074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:49.857102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:49.857125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:49.857146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:49.857171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:49.857195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:49.857219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:49.857241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824294151101117:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:49.857787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:49.857802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:49.857826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:49.857831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:49.857852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:49.857860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:49.857870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:49.857879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:49.857885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:49.857892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:49.857902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=T ... 
witched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:21:54.092775Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:54.092780Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:54.092786Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:54.092790Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:54.092796Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:54.092801Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:54.092816Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:54.092821Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:54.092831Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:54.092836Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:54.094178Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824315477089221:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=53561386498528;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322914094;max=18446744073709551615;plan=0;src=[6:7564824311182121546:2149];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:54.095673Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:54.095687Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:54.095691Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:54.100610Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089293:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.100631Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.100667Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089295:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.100674Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.102867Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:54.105673Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:54.114246Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089325:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.114264Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.114381Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089327:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.114391Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.116479Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:54.119607Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('["a", {"v" : 4}, 1,2,3,4,5,6,7,8,9,10,11,12]')) 2025-10-24T16:21:54.123862Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089358:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.123883Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.123901Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089363:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.123908Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824315477089365:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.123913Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.124572Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:54.127312Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824315477089367:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:21:54.197301Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824315477089418:2441] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] OUTPUT: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] INDEX:0/0/0 HEADER:0/0/0 >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[2,false,1,10,0,0.5] [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> DBase::VersionPureParts Test command err: Trying to start YDB, gRPC: 24776, MsgBus: 30752 2025-10-24T16:21:41.757592Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824259795560913:2140];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:41.757647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f5c/r3tmp/tmpDdCWFY/pdisk_1.dat 2025-10-24T16:21:41.802633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:41.804960Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:41.805119Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824259795560811:2081] 1761322901757052 != 1761322901757055 TServer::EnableGrpc on GrpcPort 24776, node 1 2025-10-24T16:21:41.809533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:41.809543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:41.809545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:41.809586Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30752 TClient is connected to server localhost:30752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:41.868095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:41.868132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:41.868948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:41.868969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:41.872294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:21:41.968817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:42.125047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824264090528782:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.125075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.125143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824264090528792:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.125152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.166971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:42.181103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.181108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.181146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.181165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.181210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.181214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.181229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.181237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.181245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.181257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.181259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.181274Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.181278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.181294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.181299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.181311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.181321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.181328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:42.181345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824264090528855:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.181345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:42.181358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824264090528856:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobI ... 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.164426Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824309201681037:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.164431Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.164437Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824309201681036:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.165026Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:53.169028Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824309201681040:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:21:53.268740Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824309201681091:2495] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:53.288542Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:21:53.288566Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:21:53.317385Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:21:53.317435Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:21:53.347039Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=26;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:53.669838Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:21:54.112327Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:54.112341Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:54.112346Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:54.112482Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913161;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:54.112497Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913175;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:54.112502Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913189;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:21:54.112650Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824309201680820:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322913203;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322913203 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:54.113262Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:54.113273Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:54.113277Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:54.113396Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913161;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:21:54.113413Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913175;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:21:54.113418Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322913189;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:21:54.113480Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824309201680821:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322913203;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322913203 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex |50.8%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8 >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> KqpOlapJson::CompactionVariants[10,true,1024,0,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,0,1000000,0] >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,100,0] [GOOD] >> TBtreeIndexBuilder::NoNodes [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,100,0.5] >> DBase::VersionCompactedMem [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> 
TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables |50.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |50.8%| [TS] {RESULT} ydb/tests/functional/minidumps/flake8 |50.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |50.8%| [TS] {RESULT} ydb/tests/library/compatibility/binaries/downloader/import_test >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_Truncate::Truncate [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower |50.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |50.8%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |50.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 18466, MsgBus: 62931 2025-10-24T16:21:46.796750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824281304426307:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:46.796774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:46.809449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001359/r3tmp/tmpzsc72Z/pdisk_1.dat 2025-10-24T16:21:46.883524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:46.883680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:46.892658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:46.910993Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824281304426280:2081] 1761322906796417 != 1761322906796420 2025-10-24T16:21:46.919878Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18466, node 1 2025-10-24T16:21:46.958266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:46.958278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:46.958280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:46.958321Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:47.000151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62931 TClient is connected to server localhost:62931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:47.106026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:47.329657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824285599394250:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.329690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.329837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824285599394260:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.329859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:47.372402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:47.396669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:47.396669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:47.396733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:47.396748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:47.396798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:47.396798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:47.396822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:47.396823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:47.396850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:47.396850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:47.396877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:47.396880Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:47.396909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:47.396910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:47.396934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:47.396934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:47.396977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:47.396982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:47.397004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:47.397008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:47.397038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824285599394368:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:47.397038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824285599394391:2325];tablet_id=72075186224037895;process=TTxInitSchema::Execute ... 
=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067068Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067221Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067382Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067554Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { 
FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067702Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067760Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067882Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:52.067991Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" 
: "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:52.073560Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824307694810411:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.073585Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.073669Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824307694810417:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.073671Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824307694810416:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.073683Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.074405Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:52.076278Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824307694810420:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:21:52.147466Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824307694810471:2688] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |50.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |50.8%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test |50.8%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |50.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |50.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> TBtreeIndexBuilder::SplitBySize [GOOD] >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> TieredCache::Touch [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,10,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,1000000,0.5] |50.8%| [TS] {RESULT} ydb/tests/stress/show_create/view/tests/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |50.8%| [TS] {RESULT} ydb/tests/stress/node_broker/tests/import_test >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TieredCache::Erase [GOOD] >> TieredCache::EvictNext [GOOD] >> TieredCache::UpdateLimit [GOOD] >> TieredCache::InsertUntouched [GOOD] >> TieredCache::EnsureLimits [GOOD] >> TSharedPageCache_Actor::Request_Basics [GOOD] >> TSharedPageCache_Actor::Request_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue [GOOD] >> TSharedPageCache_Actor::Request_Queue_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast [GOOD] >> TSharedPageCache_Actor::Request_Sequential [GOOD] >> TSharedPageCache_Actor::Request_Cached [GOOD] >> TSharedPageCache_Actor::Request_Different_Collections [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower [GOOD] >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] >> 
KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,100,0.5] >> KqpOlapJson::BrokenJsonWriting[1,true,0,10,0,0.5] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Subset [GOOD] >> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD] >> TSharedPageCache_Actor::Request_Superset [GOOD] >> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD] >> TSharedPageCache_Actor::Unregister_Basics [GOOD] >> TSharedPageCache_Actor::Unregister_Cached [GOOD] >> TSharedPageCache_Actor::Unregister_Expired [GOOD] >> TSharedPageCache_Actor::Unregister_InFly [GOOD] >> TSharedPageCache_Actor::Unregister_Queued >> TBtreeIndexTPart::Erases [GOOD] |50.8%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 9807322664341237738 2025-10-24T16:21:24.131771Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-10-24T16:21:24.137821Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-10-24T16:21:24.137837Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-10-24T16:21:24.138833Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-10-24T16:21:24.437242Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-10-24T16:21:24.437873Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-10-24T16:21:49.046797Z 3 00h01m08.571000s :BS_LOGCUTTER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 20466 2025-10-24T16:21:56.359699Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-10-24T16:21:56.359722Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-10-24T16:21:56.396278Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} >> TRowVersionRangesTest::SimpleInserts [GOOD] >> 
KqpOlapJson::QuotedFilterVariants[1,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1024,1000,1000000,0.5] >> TSharedPageCache_Actor::Unregister_Queued [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TSharedPageCache_Actor::Unregister_Queued_Pending [GOOD] >> TSharedPageCache_Actor::InMemory_Unregister [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPages >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPages [GOOD] >> TBtreeIndexTPart::History [GOOD] >> TS3FIFOCache::InsertUntouched [GOOD] >> TS3FIFOCache::EnsureLimits [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Active [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Passive [GOOD] >> TSharedPageCache_Transactions::One_Transaction_One_Key [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TBtreeIndexTPart::External [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TChargeBTreeIndex::NoNodes >> TScreen::Join [GOOD] >> TScreen::Sequential >> KqpOlapJson::CompactionVariants[1,false,1,0,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,100,0] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,1000000,0] [GOOD] >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,1000000,0.5] >> TScreen::Sequential [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts [GOOD] >> TScreen::Random >> TSharedPageCache_Transactions::Two_Transactions_One_Key [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits |50.8%| [TS] {RESULT} ydb/tests/olap/s3_import/import_test |50.9%| [TS] {RESULT} ydb/tests/olap/s3_import/large/flake8 |50.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |50.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |50.9%| [TS] {RESULT} ydb/tests/stress/show_create/view/tests/import_test |50.9%| [TS] {RESULT} ydb/tests/functional/api/flake8 |50.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |50.9%| [TS] {RESULT} ydb/library/yql/dq/comp_nodes/hash_join_utils/simd/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariants[2,true,1024,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 3159, MsgBus: 14268 2025-10-24T16:21:48.003118Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824288556139869:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:48.003141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001356/r3tmp/tmpQWTmZZ/pdisk_1.dat 2025-10-24T16:21:48.048734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:48.049241Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:48.049453Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824288556139843:2081] 1761322908002988 != 1761322908002991 TServer::EnableGrpc on GrpcPort 3159, node 1 2025-10-24T16:21:48.057650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:48.057663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:48.057664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:48.057701Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14268 TClient is connected to server localhost:14268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:48.106365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:48.106389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:48.107477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:48.116739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:48.308797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:48.339677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824288556140522:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.339696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.339731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824288556140532:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.339735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.368923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:48.382914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:48.383017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:48.383115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:48.383163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:48.383210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:48.383253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:48.383291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:48.383321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:48.383351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:48.383380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:48.383444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:48.383478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:48.383505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288556140593:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:48.383730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:48.383761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:48.383805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:48.383839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:48.383885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:48.383915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:48.383942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:48.383969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:48.383996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824288556140594:2324];tablet_id=72075186224037888;process=TT ... 
976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:54.379105Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:54.379105Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:54.379108Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:54.385788Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667451:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.385806Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.385863Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667453:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.385870Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.388147Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:54.392987Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.392987Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:54.397433Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667487:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.397449Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.397524Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667489:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.397531Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.400632Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:54.407094Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.407094Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:54.412156Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667524:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.412183Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.412189Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667529:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.412224Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824313619667531:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.412235Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.413021Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:54.421949Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824313619667532:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:54.487610Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824313619667584:2466] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a\":\"a2\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".c") = "1" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".e.\"c.a\"") = "2" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:2/0/0 HEADER:0/0/0 |50.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[1,true,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 31027, MsgBus: 32201 2025-10-24T16:21:49.473135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824293258004428:2143];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:49.473165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00134f/r3tmp/tmpcw9oIR/pdisk_1.dat 2025-10-24T16:21:49.513854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:49.523434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824293258004323:2081] 1761322909472333 != 1761322909472336 2025-10-24T16:21:49.526667Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31027, node 1 2025-10-24T16:21:49.532630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:49.532639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:49.532640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:49.532677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32201 TClient is connected to server localhost:32201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:49.577752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:49.584225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:49.584245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:49.585392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:49.796509Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:49.816683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824293258004997:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.816704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.816764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824293258005007:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.816772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.868249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:49.879050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:49.879113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:49.879174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:49.879212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:49.879240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:49.879275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:49.879314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:49.879340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:49.879376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:49.879400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:49.879435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:49.879462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:49.879497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824293258005060:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:49.880184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:49.880202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:49.880236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:49.880246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:49.880271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:49.880281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:49.880296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:49.880301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:49.880314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:49.880321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:49.880329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... 
: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:21:55.615836Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:55.615840Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:55.615845Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:55.615869Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:55.615876Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:55.615880Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:55.615889Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:55.615892Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:55.615900Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:55.615906Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:55.617416Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824319944088507:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=20661956130608;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322915617;max=18446744073709551615;plan=0;src=[7:7564824319944088148:2154];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.618674Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.618686Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.618688Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:55.625230Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088579:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.625250Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.625290Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088581:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.625300Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.628293Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.631932Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:55.635793Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088611:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.635814Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.635884Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088613:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.635895Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.638529Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.645663Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:55.650976Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088644:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.650994Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.651019Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088651:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.651023Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319944088649:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.651025Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.651584Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:55.660551Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824319944088653:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:55.754676Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824319944088704:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:1/0/0 HEADER:0/0/0 |50.9%| [TS] {RESULT} ydb/tests/stress/ctas/tests/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,0,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,100,0.5] >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys [GOOD] >> TPart::State [GOOD] >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,0,100,0] [GOOD] >> TSharedPageCache::Limits [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,0,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> TSharedPageCache::Limits_Config |50.9%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/import_test |50.9%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |50.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |50.9%| [TS] {RESULT} ydb/tests/stress/mixedpy/tests/import_test |50.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 22976, MsgBus: 23681 2025-10-24T16:21:49.718264Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824293437880219:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:49.718320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00134e/r3tmp/tmpOy2wSQ/pdisk_1.dat 2025-10-24T16:21:49.756631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:49.757225Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:49.757302Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824293437880114:2081] 1761322909717616 != 1761322909717619 TServer::EnableGrpc on GrpcPort 22976, node 1 2025-10-24T16:21:49.765109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:49.765120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:49.765122Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:49.765165Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23681 TClient is connected to server localhost:23681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:49.827047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:49.827069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:49.828091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:49.828456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:49.957848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:50.132365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824297732848090:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.132400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.132482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824297732848100:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.132496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.179465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:50.190847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:50.190903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:50.190961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:50.191005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:50.191028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:50.191060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:50.191087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:50.191121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:50.191143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:50.191168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:50.191195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:50.191226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:50.191251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824297732848153:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:50.191912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:50.191929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:50.191944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:50.191948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:50.191969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:50.191978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:50.191991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:50.192000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:50.192007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:50.192017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:50.192024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037 ... 
HARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:55.664244Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:55.664250Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:55.664259Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:55.664264Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:55.664270Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:55.664281Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:55.664288Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:55.664296Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:55.664303Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:55.666084Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824319357964067:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=52843038990160;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322915665;max=18446744073709551615;plan=0;src=[7:7564824319357963728:2167];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.667217Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.667243Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.667245Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER 
OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:55.673528Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964139:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.673544Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.673574Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964141:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.673582Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.675947Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.681043Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:55.685425Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964171:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.685442Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.685476Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964173:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.685488Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.688192Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.694909Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:21:55.699494Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964204:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.699513Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.699537Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964209:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.699556Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824319357964210:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.699566Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.700241Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:55.702469Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824319357964213:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:55.791628Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824319357964264:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 >> TFlatExecutorLeases::BasicsInitialLease >> TSharedPageCache_Transactions::Compaction >> TPart::Trivials [GOOD] >> Memtable::Basics [GOOD] >> TPart::WreckPart >> KqpOlapJson::FilterVariantsCount[10,true,1,0,100,0.5] >> Memtable::BasicsReverse [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,0,0.5,BLOOM_FILTER] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck [GOOD] >> Memtable::Erased [GOOD] >> TSharedPageCache_Transactions::Compaction [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> Vacuum::StartVacuumNoTables [GOOD] >> Vacuum::StartVacuumNoTablesWithRestart [GOOD] >> Vacuum::StartVacuumLog [GOOD] >> Vacuum::StartVacuum [GOOD] >> Vacuum::StartVacuumMultipleFamilies [GOOD] >> Vacuum::StartVacuumMultipleTables [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> Vacuum::StartVacuumWithFollowers [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> Vacuum::StartVacuumMultipleTimes [GOOD] >> Vacuum::StartVacuumEmptyTable [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> Vacuum::StartVacuumWithRestarts [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> TSharedPageCache::Limits_Config [GOOD] >> TPart::WreckPart [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,1000000,0.5] [GOOD] >> TPart::WreckPartColumnGroups >> Vacuum::StartVacuumRetryWithNotGreaterGenerations [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleVariants[2,false,1,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1283, MsgBus: 22397 2025-10-24T16:21:50.490241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824298188548040:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:50.490276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00134c/r3tmp/tmp3SX8xp/pdisk_1.dat 2025-10-24T16:21:50.547111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:50.561151Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824298188548016:2081] 1761322910490073 != 1761322910490076 
2025-10-24T16:21:50.562428Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1283, node 1 2025-10-24T16:21:50.571084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:50.571097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:50.571099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:50.571143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22397 2025-10-24T16:21:50.600685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:50.600717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:50.601751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:21:50.638154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:50.776056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:50.872585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824298188548689:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.872617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.872684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824298188548699:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.872695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:50.922650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:50.932192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:50.932248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:50.932283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:50.932302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:50.932320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:50.932337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:50.932357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:50.932374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:50.932390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:50.932409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:50.932430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:50.932455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:50.932476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824298188548760:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:50.933015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:50.933027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:50.933037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:50.933043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:50.933065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:50.933071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:50.933080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:50.933086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:50.933090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:50.933093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:50.933098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=T ... 
25715472;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322915457;max=18446744073709551615;plan=0;src=[6:7564824321566910702:2150];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.457663Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824321566911092:2324];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715658;this=19754619616816;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322915457;max=18446744073709551615;plan=0;src=[6:7564824321566910702:2150];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.459415Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.459432Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.459435Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.459738Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.459754Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.459757Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:55.464795Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911179:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.464819Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.464854Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911181:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.464861Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.467500Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.470525Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:55.470535Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:55.474988Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911215:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.475010Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.475114Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911217:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.475124Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.478262Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.485109Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:55.485109Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:55.489810Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911252:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.489855Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.489877Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911257:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.489888Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824321566911259:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.489899Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.490683Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:55.500187Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824321566911261:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:21:55.558053Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824321566911312:2465] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |51.0%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[2,false,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 7543, MsgBus: 16546 2025-10-24T16:21:51.328117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824302778180862:2250];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.328142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001341/r3tmp/tmpOuogwt/pdisk_1.dat 2025-10-24T16:21:51.395875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:51.396122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.396145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.397218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:51.409058Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:51.409246Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824302778180641:2081] 1761322911324963 != 1761322911324966 TServer::EnableGrpc on GrpcPort 7543, node 1 2025-10-24T16:21:51.416839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.416851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.416853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:51.416885Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16546 TClient is connected to server localhost:16546 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:51.482309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:51.664606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:51.749449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824302778181316:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.749470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.749585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824302778181326:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.749590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.795654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:51.806768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.806822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.806857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.806878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.806897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.806919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.806942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.806961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:51.806983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:51.807001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:51.807021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:51.807038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:51.807060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302778181386:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:51.807818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.807877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.807914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.807932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.807956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.807976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.808008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.808031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:51.808051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302778181403:2324];tablet_id=72075186224037889;process=TT ... 
10658;this=19960798580848;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322915979;max=18446744073709551615;plan=0;src=[6:7564824318867705659:2149];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.980457Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824318867706026:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=19960761560112;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322915980;max=18446744073709551615;plan=0;src=[6:7564824318867705659:2149];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.982063Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.982081Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.982084Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.982088Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.982099Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:55.982103Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:55.990545Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824318867706126:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.990564Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.990598Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824318867706128:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.990605Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.993037Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:55.995551Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:55.995565Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:55.999490Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824318867706162:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.999510Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.999561Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824318867706164:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:55.999572Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.002260Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:56.009983Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:56.010007Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:56.014050Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824323162673495:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.014068Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.014095Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824323162673500:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.014109Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824323162673501:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.014123Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.014693Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:56.017062Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824323162673504:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:56.077322Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824323162673555:2465] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |51.0%| [TS] {RESULT} ydb/tests/stress/cdc/tests/import_test |51.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[10,true,1,0,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 63327, MsgBus: 10410 2025-10-24T16:21:42.020125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824265875789357:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:42.020140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001277/r3tmp/tmp0M3qBA/pdisk_1.dat 2025-10-24T16:21:42.048631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:42.060085Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824265875789319:2081] 1761322902019737 != 1761322902019740 2025-10-24T16:21:42.064207Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63327, node 1 2025-10-24T16:21:42.069985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:42.069997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:42.069998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:42.070034Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10410 TClient is connected to server localhost:10410 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:42.117951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:42.123160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:42.123190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:42.124261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:42.282045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:42.362803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824265875789998:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.362826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.362872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824265875790008:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.362882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:42.398463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:42.420420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.420420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:42.420467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.420480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:42.420545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.420545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:42.420579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.420580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:42.420629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.420631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:42.420666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.420670Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:42.420705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.420707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:42.420738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.420745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:42.420769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.420774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:42.420798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:42.420801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:42.420828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824265875790124:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:42.420828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824265875790125:2325];tablet_id=72075186224037888;process=T ... 
DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:55.291427Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:55.291439Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:55.291442Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:55.291510Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914351;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:55.291523Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914372;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:55.291526Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914386;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:21:55.291561Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[7:7564824315344252901:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322914400;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322914400 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: 
"JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:55.291735Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:55.291745Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:55.291748Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:55.291805Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914351;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:55.291818Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914372;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:55.291821Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914386;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:21:55.291871Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824315344252993:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322914400;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322914400 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } 
ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:55.292938Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:55.292948Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:55.292951Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:55.293048Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914351;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:55.293062Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914372;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:55.293066Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322914386;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:21:55.293107Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824315344252934:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322914400;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322914400 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 3 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] 
OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> NFwd_TBlobs::Trace [GOOD] >> TSharedPageCache::S3FIFO >> Vacuum::StartVacuumWithTabletGCErrors >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> Vacuum::StartVacuumWithTabletGCErrors [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors |51.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |51.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/ttl/import_test >> NFwd_TBTreeIndexCache::GetTwice [GOOD] |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |51.0%| [TS] {RESULT} ydb/tests/stress/viewer/tests/flake8 >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,0,0,0] >> TSharedPageCache::S3FIFO [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,100,0] [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,100,0.5] >> TVersions::WreckHead >> TSharedPageCache::BigCache_BTreeIndex >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,1000000,0] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] |51.0%| [TS] {RESULT} ydb/tests/stress/viewer/tests/import_test |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |51.0%| [TS] {RESULT} ydb/tests/olap/oom/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:55.022997Z 00000.004 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.004 II| FAKE_ENV: Starting storage for BS group 0 00000.004 II| FAKE_ENV: Starting storage for BS group 1 00000.004 II| FAKE_ENV: Starting storage for BS group 2 00000.004 II| FAKE_ENV: Starting storage for BS group 3 00000.006 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.006 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.006 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.006 II| FAKE_ENV: DS.0 gone, left {525b, 8}, put {545b, 9} 00000.006 II| FAKE_ENV: DS.1 gone, left {582b, 8}, put {582b, 8} 00000.006 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.006 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.006 II| FAKE_ENV: All BS storage groups are stopped 00000.006 
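For reference, the DDL/DML that the KqpOlapJson tests above drive through the embedded YDB node can be read directly from the EXECUTE: lines in the log. A minimal consolidated YQL sketch of the SimpleExistsVariants scenario follows (assembled verbatim from those lines; only the semicolons, line breaks and comments are added, and the partition count plus the SUB_COLUMNS settings such as COLUMNS_LIMIT, SPARSED_DETECTOR_KFF and OTHERS_ALLOWED_FRACTION vary per test parameterization):

    -- create a column-store table with a JsonDocument column
    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        PRIMARY KEY (Col1)
    ) PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2);

    -- switch the scan reader policy and enable the SUB_COLUMNS data accessor for Col2
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`);
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=ALTER_COLUMN, NAME=Col2,
             `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
             `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`,
             `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`,
             `OTHERS_ALLOWED_FRACTION`=`0.5`);

    -- insert the sample documents and run the JSON_EXISTS scan that the test verifies
    REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
        (1u, JsonDocument('{"a" : "a1"}')),
        (2u, JsonDocument('{"a" : "a2"}')),
        (3u, JsonDocument('{"b" : "b3"}')),
        (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}'));

    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1;

The COMPARE/OUTPUT pairs in the log show the returned rows matching the expected result set, and the repeated "Resource pool default not found" warnings appear only while the default workload-manager pool is being created on demand (see the ESchemeOpCreateResourcePool operation and the subsequent "path exist, request accepts it" message); the tests still finish with [GOOD].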
II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.006 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:55.030617Z 00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.004 II| FAKE_ENV: DS.0 gone, left {525b, 8}, put {545b, 9} 00000.004 II| FAKE_ENV: DS.1 gone, left {582b, 8}, put {582b, 8} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:55.036287Z ... starting the first tablet 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.001 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.001 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 84b annex 0, ~{ } -{ }, 0 gb} 00000.001 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 
4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{2, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{3, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} hope 1 -> done Change{4, redo 72b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ... 
making a snapshot 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxMakeSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxMakeSnapshot 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxMakeSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxMakeSnapshot} hope 1 -> done Change{5, redo 64b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxMakeSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 2/8589934597, generation 0 00000.002 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.002 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{1 on 101, Compact{1.2.7, eph 1}} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 1 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.002 DD| OPS_COMPACT: Compact{1.2.7, eph 1} saving [1:2:7:1:69632:156:0] left 156b 00000.003 DD| OPS_COMPACT: Compact{1.2.7, eph 1} saving [1:2:7:1:12288:158:0] left 314b 00000.003 DD| OPS_COMPACT: Compact{1.2.7, eph 1} saving [1:2:7:1:69634:48:0] left 362b 00000.003 DD| OPS_COMPACT: Compact{1.2.7, eph 1} put [1:2:7:1:69632:156:0] result OK flags { Valid } left 206b 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 6 00000.003 DD| OPS_COMPACT: Compact{1.2.7, eph 1} put [1:2:7:1:12288:158:0] result OK flags { Valid } left 48b 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 8 for step 7 00000.003 DD| OPS_COMPACT: Compact{1.2.7, eph 1} put [1:2:7:1:69634:48:0] result OK flags { Valid } left 0b 00000.003 II| OPS_COMPACT: Compact{1.2.7, eph 1} end=Done, 3 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (156 0 0)b }, ecr=1.000 TxStatus{ [1:2:7:1:69634:48:0] } 00000.003 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 1 on TGenCompactionParams{101: gen 0 epoch 2, 0 parts} step 7, product {tx status + 1 parts epoch 2} done 00000.003 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.003 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.003 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ... waiting for snapshot to complete ... 
borrowing snapshot 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxBorrowSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxBorrowSnapshot 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxBorrowSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxBorrowSnapshot} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxBorrowSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.003 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 ... stopping the first tablet 00000.003 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 583b +(3, 226b), 9 trc, -226b acc} ... starting the second tablet 00000.004 II| TABLET_EXECUTOR: Leader{2:2:0} activating executor 00000.004 II| TABLET_EXECUTOR: LSnap{2:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.004 DD| TABLET_EXECUTOR: Leader{2:2:2} commited cookie 2 for step 1 00000.004 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFl ... E: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 22 23 ] 00000.008 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:147} got result TEvResult{2 pages [1:2:143:1:12288:758:0] ok OK}, type 5 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:147} commited cookie 1 for step 146 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} ... waiting for NActors::TEvents::TEvWakeup 00000.008 II| TABLET_SAUSAGECACHE: Wakeup DoGCManual 00000.008 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NActors::TEvents::TEvWakeup (done) 00000.008 II| TABLET_EXECUTOR: Leader{1:2:147} suiciding, Waste{2:0, 141849b +(140, 14018b), 146 trc, -14018b acc} 00000.008 DD| TABLET_SAUSAGECACHE: Unregister owner [62:30:2062] 00000.008 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:143:1:12288:758:0] owner [62:30:2062] 00000.008 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:143:1:12288:758:0] to Regular 00000.008 DD| TABLET_SAUSAGECACHE: Remove owner [62:30:2062] 00000.008 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.009 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.009 DD| TABLET_SAUSAGECACHE: Drop expired page collection [1:2:143:1:12288:758:0] 00000.009 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 0B Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NMemory::TEvConsumerLimit (done) ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.009 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.009 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.009 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.009 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 344 bytes, 344 total, blobs: { [1:2:2:1:8192:209:0], [1:2:145:1:8192:20:0], [1:2:146:1:8192:11:0], [1:2:144:1:32768:104:0] } 00000.009 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 14896 bytes, 14896 total, blobs: { [1:2:143:1:12288:758:0], [1:2:3:1:24576:90:0], [1:2:4:1:24576:96:0], [1:2:5:1:24576:96:0], [1:2:6:1:24576:96:0], [1:2:7:1:24576:97:0], [1:2:8:1:24576:95:0], [1:2:9:1:24576:91:0], [1:2:10:1:24576:93:0], [1:2:11:1:24576:96:0], [1:2:12:1:24576:96:0], [1:2:13:1:24576:93:0], [1:2:14:1:24576:97:0], [1:2:15:1:24576:97:0], [1:2:16:1:24576:97:0], [1:2:17:1:24576:96:0], [1:2:18:1:24576:100:0], [1:2:19:1:24576:97:0], [1:2:20:1:24576:96:0], [1:2:21:1:24576:97:0], [1:2:22:1:24576:97:0], [1:2:23:1:24576:97:0], [1:2:24:1:24576:97:0], [1:2:25:1:24576:97:0], [1:2:26:1:24576:97:0], [1:2:27:1:24576:97:0], [1:2:28:1:24576:97:0], [1:2:29:1:24576:96:0], [1:2:30:1:24576:100:0], [1:2:31:1:24576:97:0], [1:2:32:1:24576:95:0], [1:2:33:1:24576:104:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:99:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:97:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], 
[1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:100:0], [1:2:102:1:24576:97:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], [1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.010 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:143:1:12288:758:0] 00000.010 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:143:1:12288:758:0] owner [62:212:2237] 00000.010 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [62:212:2237] cookie 4 class Online from cache [ ] already requested [ ] to request [ 20 21 24 25 ] 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 1.42KiB EvictedInMemoryBytes: 0B 00000.010 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 20 21 24 25 ] 00000.010 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:143:1:12288:758:0] owner [62:212:2237] class Online pages [ 20 21 24 25 ] cookie 4 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 1.42KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.010 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.010 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1881b, wait} done, Waste{2:0, 141849b +(140, 14018b), 146 trc} 00000.010 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:143:1:12288:758:0] owner [62:212:2237] cache mode TryKeepInMemory 00000.010 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:143:1:12288:758:0] to TryKeepInMemory 00000.010 TT| TABLET_SAUSAGECACHE: Try move collection [1:2:143:1:12288:758:0] in memory, total pages: 26 (140KiB), pages already loaded: 4 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 1.42KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.010 TT| TABLET_SAUSAGECACHE: Try load in-memory collection [1:2:143:1:12288:758:0], total pages: 26 (140KiB), pages to request: 22 (136KiB, remain pages in queue: 0 00000.010 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [62:212:2237] class Scan pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 22 23 ] 00000.010 TT| 
TABLET_SAUSAGECACHE: Remain in-memory collections to load: 0 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{4 pages [1:2:143:1:12288:758:0] ok OK}, type 5 00000.010 TT| TABLET_SAUSAGECACHE: Sync page collection [1:2:143:1:12288:758:0] owner [62:212:2237] pages [ 21 24 25 20 ] 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 1.42KiB Passive: 0B LoadInFly: 139KiB EvictedInMemoryBytes: 0B 00000.010 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 22 23 ] 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{22 pages [1:2:143:1:12288:758:0] ok OK}, type 5 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} ... waiting for NActors::TEvents::TEvWakeup 00000.010 II| TABLET_SAUSAGECACHE: Wakeup DoGCManual 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NActors::TEvents::TEvWakeup (done) 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.010 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141849b +(0, 0b), 1 trc, -14018b acc} 00000.010 DD| TABLET_SAUSAGECACHE: Unregister owner [62:212:2237] 00000.010 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:143:1:12288:758:0] owner [62:212:2237] 00000.010 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:143:1:12288:758:0] to Regular 00000.010 DD| TABLET_SAUSAGECACHE: Remove owner [62:212:2237] 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {0 0b} miss {4 1002b} 00000.010 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.010 II| FAKE_ENV: DS.1 gone, left {143730b, 8}, put {157887b, 150} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 823}, stopped |51.0%| [TS] {RESULT} ydb/tests/datashard/add_column/flake8 |51.0%| [TS] {RESULT} ydb/tests/stress/mixedpy/tests/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[2,false,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 20717, MsgBus: 12217 2025-10-24T16:21:51.431482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824302658403465:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.432915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:51.436272Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001276/r3tmp/tmpkEuLOq/pdisk_1.dat 2025-10-24T16:21:51.464709Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:51.465698Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824302658403440:2081] 1761322911431301 != 1761322911431304 TServer::EnableGrpc on GrpcPort 20717, node 1 2025-10-24T16:21:51.480129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.480143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.480145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:51.480183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12217 TClient is connected to server 
localhost:12217 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:21:51.537576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.537605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.538207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:51.538561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:51.550410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:51.878144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824302658404113:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.878172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.878240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824302658404123:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.878251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.924298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:51.937539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.937599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.937652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.937676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.937698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.937723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.937737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.937749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.937772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.937772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:51.937892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-10-24T16:21:51.937917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.937919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:51.937942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:51.937942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.937967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:51.937974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.938028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824302658404188:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:51.938031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.938065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.938086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:51.938107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824302658404184:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execut ... 
88;tx_id=281474976710658;this=54745700414544;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322916145;max=18446744073709551615;plan=0;src=[6:7564824318400999855:2154];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:56.147043Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:56.147068Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:56.147071Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:56.147254Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:56.147278Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:56.147281Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:56.153520Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967616:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.153553Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.153657Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967618:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.153682Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.156237Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:56.158301Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:56.158334Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:56.162840Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967652:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.162856Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.162887Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967654:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.162891Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.166204Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:56.171018Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:56.171023Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:56.175892Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967689:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.175898Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967694:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.175914Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.175951Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824322695967697:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.175961Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:56.176542Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:56.178559Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824322695967696:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:56.238739Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824322695967749:2467] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[10,true,1,1000,1000000,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 25106, MsgBus: 28904 2025-10-24T16:21:49.131236Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824292390194738:2062];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:49.131253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001352/r3tmp/tmpvSF62D/pdisk_1.dat 2025-10-24T16:21:49.187394Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824292390194717:2081] 1761322909131149 != 1761322909131152 2025-10-24T16:21:49.187434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:49.189960Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25106, node 1 2025-10-24T16:21:49.194603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:49.194619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:49.194621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:49.194661Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28904 TClient is connected to server localhost:28904 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:49.241689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:49.241726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:49.242702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:49.248570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:49.439021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:49.539719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824292390195392:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.539743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.539809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824292390195402:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.539819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:49.588315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:49.608501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:49.608564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:49.608598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:49.608616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:49.608634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:49.608651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:49.608670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:49.608689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:49.608704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:49.608719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:49.608736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:49.608753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:49.608768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824292390195533:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:49.609177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:49.609192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:49.609204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:49.609207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:49.609218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:49.609221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:49.609228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:49.609231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:49.609233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824292390195518:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:49.609236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;proc ... 
pe: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:54.392773Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.392774Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393027Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393030Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393224Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393247Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393378Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393457Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393528Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns 
{ Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:54.393655Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:21:54.397410Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037894;parent_id=[7:7564824317435776957:2328];path_id=1000000895;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:54.397431Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037894;parent_id=[7:7564824317435776957:2328];path_id=1000000895;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:54.397435Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037894;parent_id=[7:7564824317435776957:2328];path_id=1000000895;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:54.397481Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824317435776957:2328];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037894;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=8dfa9454-b0f511f0-a2542f9b-f7cfd011;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:54.397558Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824317435776957:2328];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037894;event=TEvWritePortionResult;tablet_id=72075186224037894;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 
72075186224037894 in longTx ydb://long-tx/01k8bgdmjwbx4ee4kzwbszx7xq?node_id=7 >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPartBtreeIndexIteration::NoNodes >> KqpOlapJson::QuotedFilterVariants[1,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,0,1000000,0.5] |51.0%| [TS] {RESULT} ydb/tests/sql/lib/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> NPage::GroupIdEncoding [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 
Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + 
BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 
1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 
Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} |51.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |51.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,1000000,0] >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,100,0.5] [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[2,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 7255, MsgBus: 22041 2025-10-24T16:21:52.778608Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824306020996963:2178];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:52.778626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:52.780149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001274/r3tmp/tmpddcQva/pdisk_1.dat 2025-10-24T16:21:52.799886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:52.799909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:52.801121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:52.810140Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824306020996820:2081] 1761322912776833 != 1761322912776836 2025-10-24T16:21:52.814091Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7255, node 1 2025-10-24T16:21:52.820888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:52.820900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:52.820902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:52.820947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22041 TClient is connected to server localhost:22041 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:52.906335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:21:52.916819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:52.919942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:53.148444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310315964787:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.148476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.148666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310315964797:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.148676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.188919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.201638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.201689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.201730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.201750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.201778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.201799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.201822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.201856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.201876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.201900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.201923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.201945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.201965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310315964858:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.202331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.202357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.202388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.202425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.202442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.202459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.202478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.202497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310315964873:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025- ... 
658;this=91032829118624;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322917323;max=18446744073709551615;plan=0;src=[6:7564824324744030021:2165];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.325515Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.325526Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.325528Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.325550Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.325565Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.325567Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:57.331137Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997763:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.331152Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.331183Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997765:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.331196Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.333072Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.339683Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.339683Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:57.343507Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997799:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.343522Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.343556Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997801:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.343563Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.345719Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.353636Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.353638Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:57.358138Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997836:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.358169Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.358171Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997841:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.358247Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824329038997844:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.358274Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.358866Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:57.361056Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824329038997843:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:57.431940Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824329038997896:2465] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[1,true,0,10,0,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 62595, MsgBus: 32100 2025-10-24T16:21:52.769496Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824308928586887:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:52.769958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f5a/r3tmp/tmpXJGlbc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62595, node 1 2025-10-24T16:21:52.824213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:52.824424Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:52.834457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:52.834470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:52.834471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:52.834514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32100 2025-10-24T16:21:52.876391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:52.876420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:32100 2025-10-24T16:21:52.877425Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:52.889161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:52.891228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:53.055202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.159983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824313223554809:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.160006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.160116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824313223554819:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.160122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.196745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.211503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.211577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.211622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.211643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.211661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.211688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.211706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.211724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.211745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.211768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.211790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.211810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.211831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313223554872:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.220666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:53.220705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:53.220803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:53.220829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:53.220928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:53.220943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:53.220957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:53.220974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:53.220995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:53.221014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.c ... 
ched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:57.451964Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:21:57.451967Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:21:57.451988Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:21:57.452030Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:21:57.452052Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:57.452058Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:57.452064Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:57.452069Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:57.452074Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:57.452083Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:57.452096Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:57.452108Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:57.452118Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:57.452128Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:57.453775Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824326771249485:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=124762266738928;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322917453;max=18446744073709551615;plan=0;src=[7:7564824326771249107:2143];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.455562Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.455584Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.455588Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:57.461044Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326771249557:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.461070Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.461161Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326771249559:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.461173Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.463090Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.465627Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:57.469655Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326771249589:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.469678Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.469720Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326771249592:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.469732Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.471878Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.480342Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:21:57.483614Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824326771249485:2323];path_id=1000000889;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.483633Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824326771249485:2323];path_id=1000000889;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.483637Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824326771249485:2323];path_id=1000000889;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.483663Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824326771249485:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=8fd17edc-b0f511f0-9f3d2b23-1d3bfd6a;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.483702Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824326771249485:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;tablet_id=72075186224037888;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 
72075186224037888 in longTx ydb://long-tx/01k8bgdqka3qn6mme3va6djmvm?node_id=7 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 22451, MsgBus: 20762 2025-10-24T16:21:48.614353Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824288851101883:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:48.614425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001353/r3tmp/tmpae3KFE/pdisk_1.dat 2025-10-24T16:21:48.646544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:48.657345Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:48.657533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824288851101770:2081] 1761322908613560 != 1761322908613563 TServer::EnableGrpc on GrpcPort 22451, node 1 2025-10-24T16:21:48.664979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:48.664989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:48.664991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:48.665041Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20762 TClient is connected to server localhost:20762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:48.717310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:48.717341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:48.719109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:48.724693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:48.882942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:48.902590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824288851102450:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.902614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.902681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824288851102460:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.902691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:48.939741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:48.960642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:48.960701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:48.960736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:48.960755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:48.960772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:48.960788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:48.960805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:48.960822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:48.960837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:48.960858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:48.960876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:48.960891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:48.960907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824288851102597:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:48.961077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:48.961104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:48.961146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:48.961171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:48.961194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:48.961219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:48.961244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:48.961288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:48.961318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824288851102580:2325];tablet_id=72075186224037889;process ... 
nParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.496953Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497106Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497123Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497270Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497290Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497421Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497458Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497567Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:53.497576Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { 
SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:53.503215Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824312517643167:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.503226Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.503284Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824312517643172:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.503300Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824312517643173:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.503317Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.503846Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:53.511706Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824312517643176:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:53.569366Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824312517643227:2688] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |51.1%| [TS] {RESULT} ydb/tests/datashard/select/flake8 >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain [GOOD] |51.1%| [TS] {RESULT} ydb/core/viewer/tests/import_test |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,1000000,0] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex |51.1%| [TS] {RESULT} ydb/tests/datashard/secondary_index/flake8 |51.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |51.1%| [TS] {RESULT} ydb/tests/library/compatibility/binaries/downloader/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/canonical/import_test >> KqpOlapJson::CompactionVariants[10,true,1024,0,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,0,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,0,100,0.5] [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] |51.1%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |51.1%| [TS] {RESULT} ydb/tests/olap/column_family/compression/import_test |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[2,false,1,0,100,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 17226, MsgBus: 9730 2025-10-24T16:21:53.205862Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824310141671674:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:53.205937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001334/r3tmp/tmp5WcRcI/pdisk_1.dat 2025-10-24T16:21:53.257345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:53.286823Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17226, node 1 2025-10-24T16:21:53.288336Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824310141671571:2081] 1761322913204305 != 1761322913204308 2025-10-24T16:21:53.300006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:53.300016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:53.300018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:53.300053Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:53.317643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:53.317665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:53.318778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9730 TClient is connected to server localhost:9730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:21:53.415469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:53.418779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:53.489992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.951688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310141672248:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.951713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.951862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310141672259:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.951876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.951917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310141672261:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.951927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.030908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:54.056890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:54.057073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:54.057153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:54.057222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:54.057288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:54.057312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:54.057386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:54.057409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:54.057433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:54.057492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824314436639617:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:54.057523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824314436639617:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-10-24T16:21:54.057539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:54.057566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:54.057591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824314436639617:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:54.057595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:54.057615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824314436639617:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:54.057617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314436639613:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:54.057644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824314436639617:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025- ... 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:57.813259Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:57.813266Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:57.813269Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:57.814854Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824327248289862:2323];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710658;this=92143055509296;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322917814;max=18446744073709551615;plan=0;src=[7:7564824327248289487:2147];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.814855Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824327248289863:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=92143055454560;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322917814;max=18446744073709551615;plan=0;src=[7:7564824327248289487:2147];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.816776Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.816795Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.816798Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.816873Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.816889Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.816896Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:57.821964Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[7:7564824327248289962:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.821982Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.822039Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824327248289964:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.822048Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.824106Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.829795Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.829795Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:57.833715Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824327248289998:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.833732Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.833785Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824327248290000:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.833794Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.836560Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.843667Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.843671Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:21:57.847177Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037889;parent_id=[7:7564824327248289862:2323];path_id=1000000890;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.847196Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037889;parent_id=[7:7564824327248289862:2323];path_id=1000000890;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.847200Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037889;parent_id=[7:7564824327248289862:2323];path_id=1000000890;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.847240Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564824327248289862:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037889;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=9008f894-b0f511f0-a80532d3-2d76ed43;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:21:57.847292Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824327248289862:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037889;event=TEvWritePortionResult;tablet_id=72075186224037889;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 72075186224037889 in longTx ydb://long-tx/01k8bgdqyp3zz1tfn4pf5prkp5?node_id=7 |51.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> TSharedPageCache::TryKeepInMemoryMode_Basics >> KqpOlapJson::OrFilterVariants[2,false,0,0,0,0] >> Backup::GenerationDirs [GOOD] >> Backup::SnapshotIOError [GOOD] >> Backup::EmptyData >> TSharedPageCache::TryKeepInMemoryMode_Basics [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling >> Backup::EmptyData [GOOD] >> Backup::SnapshotData |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/kafka_streams_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[1,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1958, MsgBus: 5045 2025-10-24T16:21:51.482338Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824303918094803:2073];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.482603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:51.485302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b54/r3tmp/tmpJ7QqNV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1958, node 1 2025-10-24T16:21:51.526759Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:51.554031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.554041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.554043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:51.554078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:51.586540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.586560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.587640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5045 2025-10-24T16:21:51.623474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:51.662947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:52.030379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824308213062742:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.030417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.036007Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824308213062752:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.036045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.101665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:52.112868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:52.112925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:52.112973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:52.113013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:52.113033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:52.113060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:52.113088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:52.113112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:52.113153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:52.113182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:52.113206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:52.113229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:52.113254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824308213062805:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:52.113819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:52.113846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:52.113859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:52.113864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:52.113887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:52.113896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:52.113907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:52.113915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:52.113922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:52.113931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:52.113937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:21:52.113946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;proce ... 
ME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:57.719124Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:57.719132Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:21:57.719139Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:21:57.719143Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:21:57.719157Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:21:57.719165Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:21:57.719179Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:21:57.719188Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:21:57.720901Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824326950525673:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=18790401210800;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322917720;max=18446744073709551615;plan=0;src=[7:7564824326950525307:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.722576Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.722597Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:21:57.722600Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:21:57.728027Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525744:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.728050Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.728099Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525746:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.728110Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.730359Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.732340Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:21:57.736197Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525776:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.736216Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.736283Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525778:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.736294Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.738525Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:21:57.745814Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:21:57.749961Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525809:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.749985Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.750021Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525816:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.750024Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824326950525814:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.750031Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.750811Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:57.753124Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824326950525818:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:57.821297Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824326950525869:2437] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1,1,1,1],\"size\":[6,15,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[3,3,1,1]},\"o_size\":0,\"h_size\":431,\"c_size\":1104}"]]] INDEX:0/0/0 HEADER:0/0/0 |51.1%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/import_test >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,10,1000000,0] >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> Backup::SnapshotData [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,10,1000000,0] >> Backup::SnapshotLargeData >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets |51.1%| [TS] {RESULT} ydb/tests/functional/wardens/import_test >> TSharedPageCache::TryKeepInMemoryMode_Enabling [GOOD] |51.1%| [TS] {RESULT} ydb/tests/olap/load/flake8 |51.2%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |51.2%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test |51.2%| [TS] {RESULT} ydb/tests/olap/delete/flake8 |51.2%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 >> TSharedPageCache::TryKeepInMemoryMode_Disabling >> TSchemeShardServerLess::StorageBilling [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,true,0,0,0,0.5] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level |51.2%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |51.2%| [TS] {RESULT} ydb/tests/sql/import_test |51.2%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[2,false,1,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 62066, MsgBus: 19313 2025-10-24T16:21:45.120362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824276090596942:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:45.120533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00135d/r3tmp/tmp9xIZ6Y/pdisk_1.dat 2025-10-24T16:21:45.221087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:45.244438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:45.244625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:45.246455Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:45.247279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824276090596913:2081] 1761322905115379 != 1761322905115382 2025-10-24T16:21:45.256216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62066, node 1 2025-10-24T16:21:45.316859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:45.316874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:45.316875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:45.317168Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19313 2025-10-24T16:21:45.492552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:45.565180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:46.134737Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:21:46.190345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824280385564902:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.190372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.190422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824280385564912:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.190431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:46.292774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:46.313930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.314064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:46.314243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:46.314301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.314325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.314351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.314375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.314399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.314423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:46.314600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:46.314693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-10-24T16:21:46.314749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:46.314751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:46.314772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:46.314775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:46.314797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:46.314802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:46.314818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824280385564989:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:46.314822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:46.314962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:46.314990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824280385564973:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025 ... ame: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:21:57.071794Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824330113152582:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.071819Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824330113152587:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.071820Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.071888Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824330113152589:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.071902Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.072626Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:57.081552Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824330113152590:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:21:57.158038Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824330113152642:2496] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:21:57.172631Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:21:57.173045Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:21:57.195170Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:21:57.212027Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=24;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:21:57.573885Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:21:58.025037Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:58.025051Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:58.025058Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:58.025197Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322917074;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:58.025214Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322917088;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:58.025217Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322917102;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:21:58.025324Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824330113152374:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322917116;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322917116 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:21:58.026005Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:21:58.026016Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:21:58.026020Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:21:58.026138Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322917074;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:21:58.026154Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322917088;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:21:58.026158Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322917102;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:21:58.026215Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824330113152370:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322917116;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322917116 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[10,true,1,0,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12416, MsgBus: 5953 2025-10-24T16:21:51.605085Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824302228541501:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.605106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001275/r3tmp/tmptkvS1r/pdisk_1.dat 2025-10-24T16:21:51.674463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:51.676833Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12416, node 1 2025-10-24T16:21:51.699291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.699307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.699309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:21:51.699373Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:51.707822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.707885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.709536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5953 TClient is connected to server localhost:5953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:51.786682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:51.788750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:51.912388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:52.083936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824306523509417:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.083961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.084019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824306523509427:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.084024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:52.124444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:52.147447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:52.147524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:52.147578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:52.147602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:52.147623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:52.147646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:52.147668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:52.147694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:52.147712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:52.147718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:52.147725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-10-24T16:21:52.147739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:52.147764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:52.147765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:52.147790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:52.147792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:52.147816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:52.147819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824306523509542:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:52.147838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:52.147931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:52.147950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:52.147969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824306523509602:2331];tablet_id=72075186224037894;process=TTxInitSchema::E ... 
llowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935058Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935195Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935242Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935305Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935399Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935409Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935517Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935620Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:57.935652Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { 
SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:21:57.940078Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824330504498062:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.940100Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824330504498067:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.940103Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.940145Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824330504498070:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.940154Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:57.940584Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:57.949573Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824330504498069:2397], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:58.008068Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824334799465418:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:4/0/0 HEADER:0/0/0 >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,1000000,0.5] >> KqpOlapJson::FilterVariants[1,false,1,0,100,0] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,0,0] >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> TSharedPageCache::TryKeepInMemoryMode_Disabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,0,0] >> KqpOlapJson::CompactionVariants[1,false,1,0,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,100,0.5] >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices |51.2%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test |51.2%| [TS] {RESULT} ydb/tests/stress/kafka/tests/import_test |51.2%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |51.2%| [TS] {RESULT} ydb/core/viewer/tests/flake8 >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups |51.2%| [TS] {RESULT} ydb/tests/fq/s3/import_test |51.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |51.2%| [TS] {RESULT} ydb/tests/datashard/secondary_index/import_test |51.2%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,0,0] |51.2%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |51.2%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,10,1000000,0] >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is 
[1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:32.899536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:32.899553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.899557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:32.899562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:32.899567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:32.899569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:32.899579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:32.899593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:32.899682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:32.899726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:32.910286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:32.910305Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:32.912512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:32.912534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:32.912550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:32.913608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:32.913762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:32.913836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.913891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:32.914584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.914608Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:32.914732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.914739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:32.914753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:32.914758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:32.914763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:32.914777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.915698Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:32.931405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:32.931464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.931511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:32.931517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:32.931585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:32.931598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:32.932229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.932261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:32.932296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.932302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:32.932306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:32.932310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:32.932652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.932662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:32.932667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:32.932970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.932980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:32.932984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.932989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:32.933452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:32.933825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:32.933853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:32.934000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:32.934018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:32.934024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-10-24T16:20:32.934069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:32.934074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:32.934095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:32.934103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:32.934423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:32.934430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... TTxPublishToSchemeBoard Send, to populator: [1:658:2574], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-10-24T16:21:37.263321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:21:37.263326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1091: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-10-24T16:21:37.263337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:21:37.263340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-10-24T16:21:37.263344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 107:0 129 -> 240 2025-10-24T16:21:37.263594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-10-24T16:21:37.263610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-10-24T16:21:37.263615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-10-24T16:21:37.263620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-10-24T16:21:37.263626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-10-24T16:21:37.263905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 
72075186233409549, cookie: 107 2025-10-24T16:21:37.263923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-10-24T16:21:37.263932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-10-24T16:21:37.263936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:21:37.263942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-10-24T16:21:37.263956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-10-24T16:21:37.264473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-10-24T16:21:37.264487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-10-24T16:21:37.264545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-10-24T16:21:37.264580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#107:0 progress is 1/1 2025-10-24T16:21:37.264585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-10-24T16:21:37.264591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#107:0 progress is 1/1 2025-10-24T16:21:37.264595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-10-24T16:21:37.264600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-10-24T16:21:37.264613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:798:2683] message: TxId: 107 2025-10-24T16:21:37.264619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-10-24T16:21:37.264624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 107:0 2025-10-24T16:21:37.264629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 107:0 2025-10-24T16:21:37.264647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-10-24T16:21:37.264829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-10-24T16:21:37.265147Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-10-24T16:21:37.265370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-10-24T16:21:37.265378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2694:4531] TestWaitNotification: OK eventTxId 107 2025-10-24T16:21:37.277155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5943: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 772 RawX2: 4294969961 } TabletId: 72075186233409552 State: 4 2025-10-24T16:21:37.277190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-10-24T16:21:37.277584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72075186233409549 2025-10-24T16:21:37.277606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-10-24T16:21:37.277707Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 2025-10-24T16:21:37.278556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-10-24T16:21:37.278628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-10-24T16:21:37.278836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-10-24T16:21:37.278845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-10-24T16:21:37.278857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-10-24T16:21:37.279422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72075186233409549:4 2025-10-24T16:21:37.279436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-10-24T16:21:37.279501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-10-24T16:21:37.384162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-10-24T16:21:37.384197Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-10-24T16:21:37.384211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-10-24T16:21:37.384225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-10-24T16:21:37.384231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-10-24T16:21:37.384237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-10-24T16:21:37.384245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7052: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-10-24T16:21:37.384250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-10-24T16:21:37.384256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:191: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-10-24T16:21:37.436601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:37.436646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], next retry at: 2020-09-18T18:06:00.000000Z 2025-10-24T16:21:37.436662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:58.701994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:58.702046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], next retry at: 2020-09-18T18:07:00.000000Z 2025-10-24T16:21:58.702065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:21:40.277198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:21:40.277218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:40.277223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:21:40.277235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:21:40.277240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:21:40.277243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:21:40.277275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:21:40.277286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:21:40.277486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:21:40.277590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:21:40.304719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:21:40.304737Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:40.309936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:21:40.309978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:21:40.310023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:21:40.311379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:21:40.311613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:21:40.311722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:40.311823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:21:40.312936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:40.312981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:40.313366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:40.313375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:40.313406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:40.313413Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:40.313417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:40.313442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:21:40.314937Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:21:40.331414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:21:40.331510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:40.331574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:21:40.331581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:21:40.331644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:21:40.331656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:40.332615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:40.332655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:21:40.332712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:40.332720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:21:40.332724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:21:40.332728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:21:40.333167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:40.333176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:21:40.333186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:21:40.333470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:40.333477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:21:40.333482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:40.333487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:21:40.334044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:21:40.334490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:21:40.334589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:21:40.334791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:21:40.334817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:21:40.334826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:40.334906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:21:40.334913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:21:40.334946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:21:40.334959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:21:40.335546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:40.335556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 16:21:58.696535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:21:58.696843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:21:58.698106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:58.698147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:21:58.698390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435083, Sender [1:1753:3676], Recipient [1:1753:3676]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:58.698397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5293: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-10-24T16:21:58.698559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:21:58.698566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:21:58.698679Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [1:1753:3676], Recipient [1:1753:3676]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:58.698686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:21:58.698865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:21:58.698873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:21:58.698878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:21:58.698883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:21:58.699172Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274399233, Sender [1:1791:3676], Recipient [1:1753:3676]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:58.699179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5401: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-10-24T16:21:58.699182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1753:3676] sender: [1:1812:2058] recipient: [1:15:2062] 2025-10-24T16:21:58.733142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [1:1811:3723], Recipient [1:1753:3676]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-10-24T16:21:58.733166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:21:58.733201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:21:58.733317Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 105us result status StatusSuccess 2025-10-24T16:21:58.733575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 4483 Memory: 156664 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |51.3%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,100,0] >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History |51.3%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |51.3%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 >> KqpOlapJson::OrFilterVariants[2,false,0,0,0,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,false,0,0,0,0.5] >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction [GOOD] >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep >> TSharedPageCache_Actor::Attach_Basics [GOOD] >> TSharedPageCache_Actor::Attach_Request [GOOD] >> TSharedPageCache_Actor::Detach_Basics [GOOD] >> TSharedPageCache_Actor::Detach_Cached [GOOD] >> TSharedPageCache_Actor::Detach_Expired >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution |51.3%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |51.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |51.3%| [TS] {RESULT} ydb/tests/library/ut/import_test |51.3%| [TS] {RESULT} ydb/tests/olap/flake8 |51.3%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |51.3%| [TS] {RESULT} ydb/tests/solomon/reading/flake8 >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,0,0.5,BLOOM_FILTER] [GOOD] 
>> KqpOlapJson::BrokenJsonWriting[1,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,0,0] >> KqpOlapJson::FilterVariantsCount[10,true,1,0,1000000,0] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0,CATEGORY_BLOOM_FILTER] >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> TSharedPageCache_Actor::Detach_Expired [GOOD] >> TSharedPageCache_Actor::Detach_InFly [GOOD] >> TSharedPageCache_Actor::Detach_Queued [GOOD] >> TSharedPageCache_Actor::InMemory_Basics [GOOD] >> TSharedPageCache_Actor::InMemory_Preemption [GOOD] >> TSharedPageCache_Actor::InMemory_NotEnoughMemory [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling >> KqpOlapJson::CompactionVariants[1,true,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,10,0,0] |51.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-COLUMNSHARD-ARROW >> TSharedPageCache_Actor::InMemory_Enabling [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,100,0.5] [GOOD] >> Charge::ByRowsLimits [GOOD] >> TSharedPageCache_Actor::InMemory_Disabling [GOOD] >> TSharedPageCache_Actor::InMemory_Detach >> KqpOlapJson::CompactionVariants[10,false,1024,10,1000000,0] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex |51.3%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |51.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> TSharedPageCache_Actor::InMemory_Detach [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBRowLocks::LockSurvivesCompactions [GOOD] >> DBRowLocks::LockOverCompactedErase [GOOD] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-COLUMNSHARD-YDB_VALUE >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,0,0] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,0,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,1000000,0] [GOOD] >> DBRowLocks::CommitTxAfterLockThenCompact [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,0,0] >> DBRowLocks::CommitLockThenCompactRowVersion [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular [GOOD] >> TSharedPageCache_Actor::GC_Manual [GOOD] >> TSharedPageCache_Actor::GC_Scheduled >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> DBRowLocks::OverwriteLockThenCompact [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> DBRowLocks::LockOpenTxAndTxDataAccounting [GOOD] >> 
DBRowLocks::MultipleCommittedRowLocks [GOOD] >> DBRowLocks::LocksCommittedRemovedIteration [GOOD] >> DBRowLocks::LocksReplay [GOOD] >> DBRowLocks::LocksMvccCompact [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,1000000,0.5] >> TSharedPageCache_Actor::GC_Scheduled [GOOD] >> TSharedPageCache_Actor::Evict_Active [GOOD] >> TSharedPageCache_Actor::Evict_Passive [GOOD] |51.4%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |51.4%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |51.5%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,10,1000000,0.5] >> KqpOlapJson::CompactionVariants[10,true,1024,0,1000000,0.5] [GOOD] |51.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas >> KqpOlapJson::CompactionVariants[10,true,1024,10,0,0] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,0,0.5] >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[10,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 14063, MsgBus: 9658 2025-10-24T16:21:52.906996Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824308777714460:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:52.907017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00133b/r3tmp/tmphw2KXO/pdisk_1.dat 2025-10-24T16:21:52.959807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:52.971966Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824308777714436:2081] 1761322912906911 != 1761322912906914 2025-10-24T16:21:52.976474Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14063, node 1 2025-10-24T16:21:52.984099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:52.984114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:52.984116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:52.984161Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9658 2025-10-24T16:21:53.012875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:53.012909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:53.013861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:53.041513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:53.234884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.451360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824313072682406:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.451394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.451478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824313072682416:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.451489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.552897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.597442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.597508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.597635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.597653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.597736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.597757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.597778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.597799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.597811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.597879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.597925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.597938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.597994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824313072682550:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.599864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.599881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.599917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.599935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.599954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.599971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.599988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.600003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.600024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824313072682549:2326];tablet_id=72075186224037895;process=TT ... 
onDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482032Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482134Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482180Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482317Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } 
ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482323Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482464Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482492Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:21:59.482616Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", 
"a" : "a4"}')) 2025-10-24T16:21:59.487707Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824335793258948:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.487727Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.487756Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824335793258954:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.487758Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824335793258953:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.487765Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.488281Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:21:59.496408Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824335793258957:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:21:59.550118Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824335793259008:2687] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:8/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::OrFilterVariants[2,false,0,0,0,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[2,false,0,0,100,0] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,0,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,100,0] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-COLUMNSHARD-ARROW >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,100,0.5] [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,0,0.5] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single >> TIterator::Single [GOOD] >> TIterator::SingleReverse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> DBRowLocks::LocksMvccCompact [GOOD] Test command err: 10 parts: [0:0:1:0:0:0:0] 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) [0:0:2:0:0:0:0] 24000 rows, 1022 pages, 5 levels: (95941, 31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) [0:0:3:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) [0:0:4:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) 
(304021, 101348) (320014, 106679) [0:0:5:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) [0:0:6:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) [0:0:7:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) [0:0:8:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) [0:0:9:0:0:0:0] 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) [0:0:10:0:0:0:0] 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: adding part [0:0:1:0:0:0:0] data size (2.05MiB in total) adding group {0,0} PageId: 1359 RowCount: 24000 DataSize: 2046023 ErasedRowCount: 0 LevelCount: 5 IndexSize: 72027 added slice [0, 24000) data size (1.95MiB - 0B) => 1.95MiB adding part [0:0:2:0:0:0:0] data size (2.05MiB in total) adding group {0,0} PageId: 1358 RowCount: 24000 DataSize: 2045161 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71974 added slice [0, 24000) data size (1.95MiB - 0B) => 3.9MiB adding part [0:0:3:0:0:0:0] data size (2.03MiB in total) adding group {0,0} PageId: 1352 RowCount: 24000 DataSize: 2031359 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71656 added slice [0, 24000) data size (1.94MiB - 0B) => 5.84MiB adding part [0:0:4:0:0:0:0] data size (2.04MiB in total) adding group {0,0} PageId: 1353 RowCount: 24000 DataSize: 2035364 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71709 added slice [0, 24000) data size (1.94MiB - 0B) => 7.78MiB adding part [0:0:5:0:0:0:0] data size (2.04MiB in total) adding group {0,0} PageId: 1352 RowCount: 24000 DataSize: 2035051 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71656 added slice [0, 24000) data size (1.94MiB - 0B) => 9.72MiB adding part [0:0:6:0:0:0:0] data size (2.04MiB in total) adding group {0,0} PageId: 1356 RowCount: 24000 DataSize: 2039026 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71868 added slice [0, 24000) data size (1.94MiB - 0B) => 11.7MiB adding part [0:0:7:0:0:0:0] data size (2.04MiB in total) adding group {0,0} PageId: 1353 RowCount: 24000 DataSize: 2037176 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71709 added slice [0, 24000) data size (1.94MiB - 0B) => 13.6MiB adding part [0:0:8:0:0:0:0] data size (2.04MiB in total) adding group {0,0} PageId: 1356 RowCount: 24000 DataSize: 2041465 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71868 added slice [0, 24000) data size (1.95MiB - 0B) => 15.6MiB adding part [0:0:9:0:0:0:0] data size (2.04MiB in total) adding group {0,0} PageId: 1351 RowCount: 24000 DataSize: 2032862 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71603 added slice [0, 24000) data size (1.94MiB - 0B) => 17.5MiB adding part [0:0:10:0:0:0:0] data size (2.03MiB in total) adding group {0,0} PageId: 1349 RowCount: 24000 DataSize: 2031647 ErasedRowCount: 0 LevelCount: 5 IndexSize: 71497 added slice [0, 24000) data size (1.94MiB - 0B) => 19.4MiB building histogram with row resolution 24000, data size resolution 1.94MiB slicing part [0:0:1:0:0:0:0]: { {rows: [0, 23999] keys: [{7, 10}, {80038, 26687}]} } slicing node Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 => take adding node future events -1 Part: 
[0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 slicing part [0:0:2:0:0:0:0]: { {rows: [0, 23999] keys: [{80041, 26688}, {160003, 53342}]} } slicing node Part: [0:0:2:0:0:0:0] PageId: 1358 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2045161 BeginKey: {80041, 26688} EndKey: {160003, 53342} State: 0 => take adding node future events -1 Part: [0:0:2:0:0:0:0] PageId: 1358 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2045161 BeginKey: {80041, 26688} EndKey: {160003, 53342} State: 0 slicing part [0:0:3:0:0:0:0]: { {rows: [0, 23999] keys: [{160006, 53343}, {240184, 80069}]} } slicing node Part: [0:0:3:0:0:0:0] PageId: 1352 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2031359 BeginKey: {160006, 53343} EndKey: {240184, 80069} State: 0 => take adding node future events -1 Part: [0:0:3:0:0:0:0] PageId: 1352 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2031359 BeginKey: {160006, 53343} EndKey: {240184, 80069} State: 0 slicing part [0:0:4:0:0:0:0]: { {rows: [0, 23999] keys: [{240187, 80070}, {320035, 106686}]} } slicing node Part: [0:0:4:0:0:0:0] PageId: 1353 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2035364 BeginKey: {240187, 80070} EndKey: {320035, 106686} State: 0 => take adding node future events -1 Part: [0:0:4:0:0:0:0] PageId: 1353 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2035364 BeginKey: {240187, 80070} EndKey: {320035, 106686} State: 0 slicing part [0:0:5:0:0:0:0]: { {rows: [0, 23999] keys: [{320038, 106687}, {400189, 133404}]} } slicing node Part: [0:0:5:0:0:0:0] PageId: 1352 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2035051 BeginKey: {320038, 106687} EndKey: {400189, 133404} State: 0 => take adding node future events -1 Part: [0:0:5:0:0:0:0] PageId: 1352 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2035051 BeginKey: {320038, 106687} EndKey: {400189, 133404} State: 0 slicing part [0:0:6:0:0:0:0]: { {rows: [0, 23999] keys: [{400192, 133405}, {479956, 159993}]} } slicing node Part: [0:0:6:0:0:0:0] PageId: 1356 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2039026 BeginKey: {400192, 133405} EndKey: {479956, 159993} State: 0 => take adding node future events -1 Part: [0:0:6:0:0:0:0] PageId: 1356 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2039026 BeginKey: {400192, 133405} EndKey: {479956, 159993} State: 0 slicing part [0:0:7:0:0:0:0]: { {rows: [0, 23999] keys: [{479959, 159994}, {559891, 186638}]} } slicing node Part: [0:0:7:0:0:0:0] PageId: 1353 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2037176 BeginKey: {479959, 159994} EndKey: {559891, 186638} State: 0 => take adding node future events -1 Part: [0:0:7:0:0:0:0] PageId: 1353 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2037176 BeginKey: {479959, 159994} EndKey: {559891, 186638} State: 0 slicing part [0:0:8:0:0:0:0]: { {rows: [0, 23999] keys: [{559894, 186639}, {639643, 213222}]} } slicing node Part: [0:0:8:0:0:0:0] PageId: 1356 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2041465 BeginKey: {559894, 186639} EndKey: {639643, 213222} State: 0 => take adding node future events -1 Part: [0:0:8:0:0:0:0] PageId: 1356 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2041465 BeginKey: {559894, 186639} 
EndKey: {639643, 213222} State: 0 slicing part [0:0:9:0:0:0:0]: { {rows: [0, 23999] keys: [{639649, 213224}, {719461, 239828}]} } slicing node Part: [0:0:9:0:0:0:0] PageId: 1351 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2032862 BeginKey: {639649, 213224} EndKey: {719461, 239828} State: 0 => take adding node future events -1 Part: [0:0:9:0:0:0:0] PageId: 1351 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2032862 BeginKey: {639649, 213224} EndKey: {719461, 239828} State: 0 slicing part [0:0:10:0:0:0:0]: { {rows: [0, 23999] keys: [{719467, 239830}, {799348, 266457}]} } slicing node Part: [0:0:10:0:0:0:0] PageId: 1349 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2031647 BeginKey: {719467, 239830} EndKey: {799348, 266457} State: 0 => take adding node future events -1 Part: [0:0:10:0:0:0:0] PageId: 1349 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2031647 BeginKey: {719467, 239830} EndKey: {799348, 266457} State: 0 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 24000 nextHistogramDataSize: 2037513 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 20 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 0 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 24000 nextHistogramDataSize: 2037513 closedRowCount: 0 closedDataSize: 0 openedRowCount: 24000 openedDataSize: 2046023 openedSortedByRowCount: 1 openedSortedByDataSize: 1 FutureEvents: 19 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 1 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 24000 nextHistogramDataSize: 2037513 closedRowCount: 0 closedDataSize: 0 openedRowCount: 24000 openedDataSize: 2046023 openedSortedByRowCount: 1 openedSortedByDataSize: 1 FutureEvents: 19 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 1 processing event IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 1 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 24000 nextHistogramDataSize: 2037513 closedRowCount: 24000 closedDataSize: 2046023 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 1 openedSortedByDataSize: 1 FutureEvents: 18 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 1359 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2046023 BeginKey: {7, 10} EndKey: {80038, 26687} State: 2 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 48000 nextHistogramDataSize: 4075026 closedRowCount: 24000 closedDataSize: 2046023 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 1 openedSortedByDataSize: 1 FutureEvents: 18 currentKeyPointer: 
IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 1358 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2045161 BeginKey: {80041, 26688} EndKey: {160003, 53342} State: 0 processing event IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 1358 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2045161 BeginKey: {80041, 26688} EndKey: {160003, 53342} State: 0 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 48000 nextHistogramDataSize: 4075026 closedRowCount: 24000 closedDataSize: 2046023 openedRowCount: 24000 openedDataSize: 2045161 openedSortedByRowCount: 2 openedSortedByDataSize: 2 FutureEvents: 17 currentKeyPointer: IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 1358 Level: 5 BeginRowId: 0 EndRowId: 24000 BeginDataSize: 0 EndDataSize: 2045161 BeginKey: {80041, 26688} E ... : 41 closedRowCount: 0 closedDataSize: 0 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 6 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 0 processing event IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 0 loading node by row count triggerPart: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 closedRowCount: 0 openedRowCount: 167 nextHistogramRowCount: 0 loading node by data size triggerPart: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 closedDataSize: 0 openedDataSize: 13565 nextHistogramDataSize: 41 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 41 closedRowCount: 0 closedDataSize: 0 openedRowCount: 167 openedDataSize: 13565 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 5 currentKeyPointer: IsBegin: 1 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 iterating stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 41 closedRowCount: 0 closedDataSize: 0 openedRowCount: 167 openedDataSize: 13565 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 5 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 processing event IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 1 checking stats.RowCountHistogram: 0 stats.DataSizeHistogram: 0 nextHistogramRowCount: 0 nextHistogramDataSize: 41 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 4 currentKeyPointer: IsBegin: 0 Part: [0:0:1:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 13565 BeginKey: {7, 10} EndKey: {553, 192} State: 2 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 0 openedDataSize: 0 
openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 4 currentKeyPointer: IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 processing event IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 0 loading node by row count triggerPart: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 closedRowCount: 167 openedRowCount: 166 nextHistogramRowCount: 168 loading node by data size triggerPart: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 closedDataSize: 13565 openedDataSize: 13940 nextHistogramDataSize: 13566 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 166 openedDataSize: 13940 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 3 currentKeyPointer: IsBegin: 1 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 iterating stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 167 closedDataSize: 13565 openedRowCount: 166 openedDataSize: 13940 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 3 currentKeyPointer: IsBegin: 0 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 processing event IsBegin: 0 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 1 checking stats.RowCountHistogram: 1 stats.DataSizeHistogram: 1 nextHistogramRowCount: 168 nextHistogramDataSize: 13566 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 2 currentKeyPointer: IsBegin: 0 Part: [0:0:2:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 166 BeginDataSize: 0 EndDataSize: 13940 BeginKey: {556, 193} EndKey: {1087, 370} State: 2 iterating stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 2 currentKeyPointer: IsBegin: 1 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 processing event IsBegin: 1 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 0 loading node by row count triggerPart: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 closedRowCount: 333 openedRowCount: 167 nextHistogramRowCount: 334 loading node by data size triggerPart: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} 
EndKey: {1645, 556} State: 1 closedDataSize: 27505 openedDataSize: 14115 nextHistogramDataSize: 27506 checking stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 167 openedDataSize: 14115 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 1 currentKeyPointer: IsBegin: 1 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 iterating stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 333 closedDataSize: 27505 openedRowCount: 167 openedDataSize: 14115 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 1 currentKeyPointer: IsBegin: 0 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 processing event IsBegin: 0 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 1 checking stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 500 closedDataSize: 41620 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 0 currentKeyPointer: IsBegin: 0 Part: [0:0:3:0:0:0:0] PageId: 0 Level: 0 BeginRowId: 0 EndRowId: 167 BeginDataSize: 0 EndDataSize: 14115 BeginKey: {1090, 371} EndKey: {1645, 556} State: 2 finished stats.RowCountHistogram: 2 stats.DataSizeHistogram: 2 nextHistogramRowCount: 334 nextHistogramDataSize: 27506 closedRowCount: 500 closedDataSize: 41620 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 0 openedSortedByDataSize: 0 FutureEvents: 0 Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 
23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 >> KqpOlapJson::FilterVariants[1,false,1,0,100,0] [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,0,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,0,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,10,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,10,1000000,0.5] |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache_Actor::Evict_Passive [GOOD] Test command err: 0.2905 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.332001Z 00000.012 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.013 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.013 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 
35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} queued, type NKikimr::NSharedCache::TTxInitSchema 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 500b annex 0, ~{ } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.013 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 00000.013 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 
00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: 
Leader{1:2:12} commited cookie 1 for step 11 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} commited cookie 1 for step 15 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{15, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} commited cookie 1 for step 16 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of 
static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{16, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} commited cookie 1 for step 17 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{17, re ... :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #5 (done) Checking results#5 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:22:00.310373Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 6 class Online from cache [ ] already requested [ ] to request [ 14 ] 2025-10-24T16:22:00.310378Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 13 ] owner [33:5:2052] 2025-10-24T16:22:00.310381Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #6 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #6 (done) Checking fetches#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] ... waiting for results #6 2025-10-24T16:22:00.310402Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 14 ] 2025-10-24T16:22:00.310405Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 14 ] cookie 6 2025-10-24T16:22:00.310409Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #6 (done) Checking results#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:22:00.310424Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 7 class Online from cache [ ] already requested [ ] to request [ 15 ] 2025-10-24T16:22:00.310428Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 14 ] owner [33:5:2052] 2025-10-24T16:22:00.310432Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #7 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #7 (done) Checking fetches#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] ... waiting for results #7 2025-10-24T16:22:00.310453Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 15 ] 2025-10-24T16:22:00.310456Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 15 ] cookie 7 2025-10-24T16:22:00.310459Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #7 (done) Checking results#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:22:00.310474Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 8 class Online from cache [ ] already requested [ ] to request [ 16 ] 2025-10-24T16:22:00.310480Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 15 ] owner [33:5:2052] 2025-10-24T16:22:00.310484Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #8 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #8 (done) Checking fetches#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] ... waiting for results #8 2025-10-24T16:22:00.310505Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 16 ] 2025-10-24T16:22:00.310508Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 16 ] cookie 8 2025-10-24T16:22:00.310511Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #8 (done) Checking results#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:22:00.310526Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 9 class Online from cache [ ] already requested [ ] to request [ 17 ] 2025-10-24T16:22:00.310531Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 16 ] owner [33:5:2052] 2025-10-24T16:22:00.310534Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #9 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #9 (done) Checking fetches#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] ... waiting for results #9 2025-10-24T16:22:00.310555Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 17 ] 2025-10-24T16:22:00.310558Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 17 ] cookie 9 2025-10-24T16:22:00.310561Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #9 (done) Checking results#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:22:00.310577Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 10 class Online from cache [ ] already requested [ ] to request [ 18 ] 2025-10-24T16:22:00.310581Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 17 ] owner [33:5:2052] 2025-10-24T16:22:00.310591Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #10 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #10 (done) Checking fetches#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for results #10 2025-10-24T16:22:00.310615Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 18 ] 2025-10-24T16:22:00.310618Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 18 ] cookie 10 2025-10-24T16:22:00.310621Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #10 (done) Checking results#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:22:00.310636Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 11 class Online from cache [ ] already requested [ ] to request [ 19 ] 2025-10-24T16:22:00.310641Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 18 ] owner [33:5:2052] 2025-10-24T16:22:00.310644Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #11 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #11 (done) Checking fetches#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] ... waiting for results #11 2025-10-24T16:22:00.310665Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 19 ] 2025-10-24T16:22:00.310668Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 19 ] cookie 11 2025-10-24T16:22:00.310671Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #11 (done) Checking results#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] ... waiting for NActors::TEvents::TEvWakeup 2025-10-24T16:22:00.310685Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:860: Wakeup DoGCManual 2025-10-24T16:22:00.310688Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) ... waiting for NActors::TEvents::TEvWakeup 2025-10-24T16:22:00.310697Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:860: Wakeup DoGCManual 2025-10-24T16:22:00.310702Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:951: Drop page collection [1:0:256:0:0:0:1] pages [ 1 2 3 ] owner [33:5:2052] 2025-10-24T16:22:00.310705Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NActors::TEvents::TEvWakeup (done) >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,0,0,0] [GOOD] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,0,1000,0] >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,0,0] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,0,0] >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,100,0] >> TPartBtreeIndexIteration::OneNode >> KqpOlapJson::FilterVariantsCount[10,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,0,1000000,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,0,0] >> KqpOlapJson::OrFilterVariants[2,false,0,0,100,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,false,0,0,100,0.5] >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,0,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,0,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,100,0] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,100,0] >> KqpOlap::SimpleLookupOlap >> KqpOlapJson::CompactionVariants[1,true,1,10,0,0] [GOOD] |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/oltp_workload >> KqpOlapJson::CompactionVariants[1,true,1,10,0,0.5] >> KqpOlapJson::CompactionVariants[10,false,1024,10,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,10,1000000,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,100,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,100,0.5] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,100,0.5] >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse |51.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,0,0] >> KqpOlapJson::QuotedFilterVariants[10,true,0,0,0,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,100,0] >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0,BLOOM_FILTER] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,1,0,0,0.5] >> KqpOlapJson::FilterVariants[1,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,0,1000000,0.5] >> KqpOlapJson::CompactionVariants[10,true,1024,10,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,10,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,0,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,1000000,0] |51.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> KqpOlapJson::FilterVariantsCount[10,true,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,0,0] >> KqpOlapJson::OrFilterVariants[2,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[2,false,0,0,1000000,0] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_Limit2 [GOOD] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,100,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,100,0.5] |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/workload_log >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,0,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,100,0] >> TIterator::MixedReverse [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,1000000,0] >> TIterator::Serial >> KqpOlapJson::SimpleVariants[10,false,1024,0,0,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,100,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,0,0.5] [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,100,0.5] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,0,0.5] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-COLUMNSHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE >> 
KqpOlapJson::CompactionVariants[1,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,1000000,0.5] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-DATASHARD-ARROW >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,0,0] >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,100,0] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,100,0] >> KqpOlap::SimpleLookupOlap [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,10,0,0] >> KqpOlap::ScanQueryOltpAndOlap >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,0,0] >> KqpOlapJson::OrFilterVariants[2,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,false,0,0,1000000,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,1000000,0.5] >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> KqpOlapJson::BrokenJsonWriting[1,false,1,10,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,100,0] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,1000000,0] [GOOD] >> TLegacy::IndexIter [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,1000000,0.5] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,1000000,0] >> KqpOlapJson::QuotedFilterVariants[10,true,0,0,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,true,0,0,0,0.5] >> TPart::MassCheck [GOOD] >> TPart::PageFailEnv >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-DATASHARD-YDB_VALUE >> KqpOlapJson::FilterVariantsCount[10,true,1,10,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,0,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,100,0] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-COLUMNSHARD-CSV >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv |51.7%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |51.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload_kv >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,1000000,0] >> TPart::ForwardEnv [GOOD] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,0,1000,0] [GOOD] >> TPart::ForwardEnvColumnGroups >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,0,1000,0.5] >> KqpOlapJson::CompactionVariants[1,true,1,10,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,10,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,100,0] >> KqpOlapJson::CompactionVariants[10,false,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,1000,0,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,100,0.5] >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::ManyVersions [GOOD] >> TVersions::Wreck2Reverse [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TVersions::Wreck1 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1024,0,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,100,0] >> KqpOlap::ScanQueryOltpAndOlap [GOOD] >> KqpOlap::SelectLimit1ManyShards >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,100,0.5] >> KqpOlapJson::SimpleVariants[10,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,1000,0,0] >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,10,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,10,1000000,0] >> KqpOlapJson::OrFilterVariants[2,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[10,true,1024,1000,1000000,0] >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> KqpOlapJson::QuotedFilterVariants[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,0,0] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,100,0] >> KqpOlapJson::CompactionVariants[10,true,1024,10,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,10,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,100,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,1000000,0.5] >> KqpOlapJson::FilterVariants[1,false,1,10,0,0] [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,10,0,0.5] >> KqpOlapJson::QuotedFilterVariants[10,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,0,0] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,100,0] [GOOD] >> 
KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,100,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TPart::CutKeys_CutUtf8String [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:58.246779Z 00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.004 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.004 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.004 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, 
NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.004 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{4, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{5, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{6, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{7, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| 
TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{8, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{9, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{10, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{11, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{12, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| 
TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{13, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{14, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.005 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memor ... {[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{11} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 10 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{3} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{3} 
Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{3} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{3} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{3} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 
rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 280b 2r} data 904b + FlatIndex{3} Label{3 rev 3, 401b} 3 rec | Page Row Bytes (String) | 0 0 140b {____________________________________________________________________________________________________cccddd} | 1 1 140b {____________________________________________________________________________________________________cd} | 1 1 140b {____________________________________________________________________________________________________cddddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 280 ErasedRowCount: 0} Label{13 rev 1, 204b} | PageId: 0 RowCount: 1 DataSize: 140 ErasedRowCount: 0 | > {____________________________________________________________________________________________________cd} | PageId: 1 RowCount: 2 DataSize: 280 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x89\x91} | 1 1 49b {abc\xF0\x9F\x89\x91\xF0\x9F\x89\x91\xF0\x9F\x89\x91} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x89\x91} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 342b + FlatIndex{3} Label{3 rev 3, 120b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 46b {abc\xE2\x9A\xAB} | 1 1 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xE2\x9A\xAB} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 344b + FlatIndex{3} Label{3 rev 3, 121b} 3 rec | Page Row Bytes (Utf8) | 0 0 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 46 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 332b + FlatIndex{3} Label{3 rev 3, 115b} 
3 rec | Page Row Bytes (Utf8) | 0 0 40b {abcxxx} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 326b + FlatIndex{3} Label{3 rev 3, 112b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 40b {abcx} | 1 1 40b {abcxxx} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abcx} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x96} | 1 1 49b {abc\xF0\x9F\x98\x96\xF0\x9F\x98\x96\xF0\x9F\x98\x96} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x96} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-COLUMNSHARD-CSV [GOOD] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-ARROW >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-DATASHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-DATASHARD-CSV >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,1000,0,0.5] >> KqpBoolColumnShard::TestAggregation-SCAN_QUERY-COLUMNSHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-COLUMNSHARD-ARROW ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[2,true,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24330, MsgBus: 29513 2025-10-24T16:21:52.919109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824304793278485:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:52.919129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001273/r3tmp/tmp8CbQ6T/pdisk_1.dat 2025-10-24T16:21:52.963921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:52.977014Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:52.977176Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824304793278455:2081] 1761322912918745 != 1761322912918748 TServer::EnableGrpc on GrpcPort 24330, node 1 2025-10-24T16:21:52.985868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:52.985881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-10-24T16:21:52.985882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:52.985917Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29513 2025-10-24T16:21:53.024852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:53.024887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:53.025877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:53.048776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:53.200702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.450056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824309088246426:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.450085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.450208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824309088246436:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.450231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.557327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.591160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.591214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.591276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.591295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.591313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.591333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.591352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.591365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.591379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.591401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.591419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.591437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.591452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824309088246500:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.591709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.591762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.591832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.592057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.592086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.592112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.592135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.592156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.592176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309088246499:2323];tablet_id=72075186224037888;process= ... 
sage: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:02.832138Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:02.832138Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:02.841615Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:02.843464Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710664;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:02.843476Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710664;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:02.850000Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:02.855988Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 
OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:02.855987Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:22:02.873581Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:02.873581Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:22:02.893079Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=26;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:03.255416Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:22:03.706074Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824348677385878:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:03.706077Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824348677385879:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:03.706082Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824348677385879:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:03.706088Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[6:7564824348677385878:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:03.706212Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824348677385878:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322922751;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:03.706229Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824348677385878:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322922765;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:03.706239Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824348677385879:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322922751;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:03.706252Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824348677385879:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322922765;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:03.706318Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824348677385878:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322922891;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322922891 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:03.706319Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824348677385879:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322922891;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322922891 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { 
Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::FilterVariantsCount[10,true,1,10,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,100,0] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,100,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[1,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,0,1000000,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 64790, MsgBus: 7115 2025-10-24T16:21:58.944032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824331129993391:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:58.944069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001271/r3tmp/tmp3lcGw2/pdisk_1.dat 2025-10-24T16:21:58.983939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:58.997547Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:58.997747Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824331129993288:2081] 1761322918943332 != 1761322918943335 TServer::EnableGrpc on GrpcPort 64790, node 1 2025-10-24T16:21:59.003321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:59.003334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:59.003336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:59.003376Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7115 TClient is connected to server localhost:7115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:59.057360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:59.057388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:59.058301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:21:59.058390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:59.234781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:59.514584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824335424961259:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.514614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.514661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824335424961268:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.514673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.566737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:59.593404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.593450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.593495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.593522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.593547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.593574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:59.593602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:59.593627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:59.593652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:59.593677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:59.593702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:59.593816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:59.593842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824335424961324:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:59.595050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:59.595065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:59.595079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:59.595084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:59.595136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:59.595145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:59.595156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:59.595165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:59.595177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:59.595186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline ... 
NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:03.948212Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:22:03.948221Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:22:03.948244Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:22:03.948253Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:22:03.948267Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:03.948276Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:03.948284Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:03.948289Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:03.948295Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:03.948299Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:03.948314Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:03.948323Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:03.948334Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:03.948342Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:03.950104Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824353083250707:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=57145589971200;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322923950;max=18446744073709551615;plan=0;src=[7:7564824353083250332:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:03.954760Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:03.954780Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:03.954783Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:03.959358Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824353083250780:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.959374Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.959404Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824353083250782:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.959414Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.961273Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:03.968586Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:03.971934Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824353083250812:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.971951Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.972026Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824353083250814:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.972032Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.974783Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:03.982513Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:22:03.985612Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824353083250707:2323];path_id=1000000889;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:03.985628Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824353083250707:2323];path_id=1000000889;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:03.985631Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824353083250707:2323];path_id=1000000889;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:03.985663Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824353083250707:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=93b1a14e-b0f511f0-958670cb-75163ebc;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:03.985696Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824353083250707:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;tablet_id=72075186224037888;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 
72075186224037888 in longTx ydb://long-tx/01k8bgdxyg71qpqs9jb2xjh9kk?node_id=7 >> KqpOlapJson::CompactionVariants[1,false,1,10,0,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,100,0.5] >> KqpOlapJson::BrokenJsonWriting[1,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,10,1000000,0.5] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,1000000,0] >> KqpOlapJson::SimpleVariants[10,false,1,1000,0,0] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,1000,100,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,1000000,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[2,false,0,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 20960, MsgBus: 32030 2025-10-24T16:21:51.384064Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824303517297609:2062];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.384081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f5b/r3tmp/tmpcKQE7O/pdisk_1.dat 2025-10-24T16:21:51.435914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:51.444166Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:51.444489Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824303517297587:2081] 1761322911383953 != 1761322911383956 TServer::EnableGrpc on GrpcPort 20960, node 1 2025-10-24T16:21:51.453870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.453880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.453881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:51.453923Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32030 2025-10-24T16:21:51.490016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.490046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.490772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to 
server localhost:32030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:51.512394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:51.514557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:51.608748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:51.888117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824303517298262:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.888144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.888209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824303517298272:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.888220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:51.933021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:51.948166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.948334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.948429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.948460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.948480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.948505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.948528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.948619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:51.948703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:51.948728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:51.948749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:51.948768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:51.948787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824303517298343:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:51.949073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.949084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.949119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.949140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.949159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.949180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.949200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.949309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824303517298333:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Re ... rPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:03.464858Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824355799428071:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.464876Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.464943Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824355799428077:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.464957Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824355799428076:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.464967Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.465748Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:03.473291Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824355799428080:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:22:03.545930Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824355799428131:2494] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:03.564633Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:03.565147Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:22:03.596266Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:22:03.614804Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=24;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:03.957232Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:22:04.420095Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:04.420103Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:04.420109Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:04.420110Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:04.420114Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:04.420115Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:04.420253Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322923465;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:04.420269Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322923465;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:04.420274Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322923479;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:04.420279Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322923493;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:04.420280Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322923479;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:04.420285Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322923493;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:04.420435Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824355799427860:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322923507;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322923507 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: 
false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:04.420443Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824355799427875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322923507;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322923507 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,100,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,0,0.5] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-DATASHARD-CSV [GOOD] >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-ARROW >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-ARROW >> KqpOlapJson::OrFilterVariants[10,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,1000,0,0.5] >> KqpOlapJson::FilterVariants[1,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,10,100,0] |51.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,10,0,0.5] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-COLUMNSHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,100,0] >> KqpOlapJson::BrokenJsonWriting[1,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,1000000,0.5] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate |51.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapJson::CompactionVariants[1,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,100,0.5] >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,100,0.5] >> KqpOlapJson::CompactionVariants[10,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,1000,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 16806, MsgBus: 64523 2025-10-24T16:21:59.135429Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824335078357193:2139];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:59.135447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:21:59.141906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132e/r3tmp/tmpkKgb92/pdisk_1.dat 2025-10-24T16:21:59.198444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:59.198471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:59.199527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:59.208537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824335078357092:2081] 1761322919132565 != 1761322919132568 2025-10-24T16:21:59.213281Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16806, node 1 2025-10-24T16:21:59.242713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:59.242729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:59.242731Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:59.242781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:59.334034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64523 TClient is connected to server localhost:64523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:59.459772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:00.073286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339373325060:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.073310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.075987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339373325070:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.076021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.107263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:00.119753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.119943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.119980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.120002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.120022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.120048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.120085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.120108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.120129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:00.120151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:00.120173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:00.120195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:00.120215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824339373325131:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:00.122974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.123017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.123062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.123083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.123108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.123129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.123150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.123171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.123187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824339373325146:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execut ... 
od=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322925198;max=18446744073709551615;plan=0;src=[6:7564824356753058146:2161];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:05.200844Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:05.200848Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:05.200857Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:05.200859Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:05.200860Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:05.200862Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:05.206569Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025892:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.206587Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.206627Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025894:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.206637Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.208511Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:05.214730Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.214730Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`true`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:05.220952Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025928:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.220971Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.221045Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025930:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.221056Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.227122Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:05.229425Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.229429Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:05.239285Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025965:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.239303Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.239455Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025970:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.239475Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824361048025971:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.239533Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.240092Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:05.243311Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824361048025974:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:05.317056Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824361048026025:2465] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::OrFilterVariants[10,true,1024,1000,1000000,0] [GOOD] Test command err: Trying to start YDB, gRPC: 18104, MsgBus: 18975 2025-10-24T16:21:58.590510Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824334508571045:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:58.590678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b52/r3tmp/tmpbuDFso/pdisk_1.dat 2025-10-24T16:21:58.636902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:58.637051Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:58.637239Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824334508571011:2081] 1761322918589918 != 1761322918589921 TServer::EnableGrpc on GrpcPort 18104, node 1 2025-10-24T16:21:58.643426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:58.643436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:58.643438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:58.643467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18975 TClient is connected to server localhost:18975 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:58.693309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:58.693340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:58.694403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:58.699480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:58.815399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:58.967377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824334508571689:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.967402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.967461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824334508571699:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.967470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.010951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:59.021148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.021211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.021254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.021274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.021294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.021315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:59.021342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:59.021362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:59.021381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:59.021400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:59.021423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:59.021441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:59.021466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824338803539051:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:59.021918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824338803539050:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.021946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824338803539050:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.021979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824338803539050:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.021981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:59.021990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:59.022000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824338803539050:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.022003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:59.022007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:59.022021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824338803539050:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.022024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract ... 
T16:22:05.028591Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.028699Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.028893Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.028899Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.029066Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { 
} DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.029067Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.029206Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.029216Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:05.029336Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false 
ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:05.041038Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824364618448851:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.041065Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.041238Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824364618448856:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.041251Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824364618448857:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.041269Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.042031Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:05.043892Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824364618448860:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:05.128547Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824364618448911:2687] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE (JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3") OR (JSON_VALUE(Col2, "$.b") = "b1" AND JSON_VALUE(Col2, "$.c") = "c1") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:14/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleVariants[10,false,1,1000,100,0] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,1000,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,1000000,0.5] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,1000000,0.5] >> Backup::SnapshotLargeData [GOOD] >> Backup::SnapshotSchema [GOOD] >> Backup::ChangelogData [GOOD] >> Backup::ChangelogLargeData >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,1000000,0.5] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,0,0] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> KqpOlapJson::BloomMixIndexesVariants[true,false,0,0,0,0,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,100,0.5] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,1000000,0] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-YDB_VALUE >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[1,true,1,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 32477, MsgBus: 3270 2025-10-24T16:21:52.863932Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824305349434883:2064];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:52.863949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00133e/r3tmp/tmpmdMbht/pdisk_1.dat 2025-10-24T16:21:52.909763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 32477, node 1 2025-10-24T16:21:52.932658Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824305349434859:2081] 1761322912863796 != 1761322912863799 2025-10-24T16:21:52.935504Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:52.944056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:52.944068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:52.944070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:52.944138Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3270 2025-10-24T16:21:52.976889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:52.976915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:52.978588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:53.013534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:21:53.016431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:53.158802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.372749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824309644402830:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.372779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.372917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824309644402840:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.372926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.410857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.432399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.432469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.432524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.432538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.432552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.432565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.432585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.432598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.432684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.432700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.432713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.432725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.432752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824309644402893:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.433226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:53.433234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:53.433243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:53.433249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:53.433262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:53.433265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:53.433272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:53.433275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:53.433280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:53.433283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer ... 
lt not found or you don't have access permissions } 2025-10-24T16:22:04.651023Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:04.662141Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:04.662230Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824358709756799:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.662249Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.662299Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824358709756808:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.662304Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.666073Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:04.673397Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824358709756832:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.673426Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.673430Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824358709756837:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.673458Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824358709756839:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.673470Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.674150Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:04.676743Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824358709756841:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:22:04.730637Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824358709756892:2462] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:04.747732Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:22:04.772445Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:22:04.794071Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:22:05.014346Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:05.605306Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:05.605320Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:05.605326Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:05.605497Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322924662;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:05.605514Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322924683;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:05.605519Z node 7 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322924697;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:05.605748Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824358709756662:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322924711;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322924711 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::CompactionVariants[10,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,0,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,1000,0,0.5] [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> KqpOlapJson::FilterVariants[1,false,1,10,100,0] [GOOD] >> KqpOlapJson::FilterVariants[1,false,1,10,100,0.5] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,0,1000,0.5] [GOOD] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,0,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,1000000,0] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-COLUMNSHARD-CSV >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,100,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,10,0,0.5] [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> 
TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,0,0] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0.5,BLOOM_FILTER] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,100,0] >> IndexBuildTestReboots::BaseCaseWithDataColumns [GOOD] >> IndexBuildTestReboots::BaseCaseWithDataColumnsUniq >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,10,0,0.5] >> KqpOlapJson::SimpleVariants[10,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,1000,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,1000000,0.5] [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[10,true,1024,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 22757, MsgBus: 21804 2025-10-24T16:21:52.856563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824305096574421:2071];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:52.856586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00133f/r3tmp/tmpOIqWrH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22757, node 1 2025-10-24T16:21:52.911831Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:52.911887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:52.912240Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824305096574380:2081] 1761322912855957 != 1761322912855960 2025-10-24T16:21:52.918362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:52.918373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:52.918375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:52.918418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to 
server localhost:21804 2025-10-24T16:21:52.960711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:52.960742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:52.962918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:52.986511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:52.992711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:53.183293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.359076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824309391542350:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.359108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.359313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824309391542360:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.359322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.445519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.490645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.490790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.490855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.490884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.490904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.491017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.491134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.491156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.491177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.491200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.491406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.491428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.491500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.491533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564824309391542480:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:53.491540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.491584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.491613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.491633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.491654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.491670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.491693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824309391542481:2325];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=R ... 
SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:06.085201Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:06.085210Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:06.085214Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:06.085285Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925138;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:06.085298Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925159;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:06.085303Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925173;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:06.085342Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824361943720237:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322925194;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322925194 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { 
SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:06.086342Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:06.086351Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:06.086356Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:06.086496Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925138;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:06.086509Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925159;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:06.086525Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925173;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:06.086613Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824361943720356:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322925194;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322925194 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:06.086828Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:06.086831Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:06.086835Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:06.086922Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925138;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:06.086929Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925159;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:06.086933Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925173;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:06.086978Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824361943720240:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322925194;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322925194 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFirstLevelVariants[2,false,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 2460, MsgBus: 11552 2025-10-24T16:22:00.417467Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824341238337967:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:00.417795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132c/r3tmp/tmpx1QF94/pdisk_1.dat 2025-10-24T16:22:00.485293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:00.485913Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:00.487929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824341238337930:2081] 1761322920417068 != 1761322920417071 TServer::EnableGrpc on GrpcPort 2460, node 1 2025-10-24T16:22:00.500867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:00.500881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:00.500883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:00.500921Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:00.525020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:00.525055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:00.526148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11552 TClient is connected to server localhost:11552 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:00.618451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:00.696236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:01.131816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824345533305906:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.131883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.132012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824345533305916:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.132028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.172730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:01.183617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:01.183692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:01.183738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:01.183767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:01.183792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:01.183818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:01.183845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:01.183886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:01.183910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:01.183933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:01.183959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:01.183987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:01.184011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824345533305992:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:01.184533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:01.184552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:01.184569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:01.184574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:01.184597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:01.184605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:01.184618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:01.184627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:01.184635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:01.184643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:01.184649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=T ... 
6;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322926170;max=18446744073709551615;plan=0;src=[6:7564824362494574977:2154];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.172655Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.172670Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.172694Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.172696Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.172713Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.172724Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:06.180178Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542739:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.180200Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.180236Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542741:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.180241Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.182878Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.185204Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.185329Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`true`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:06.196832Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542775:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.196874Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.197019Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542783:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.197039Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.197191Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.198936Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.198938Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:06.206858Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542812:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.206887Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.206999Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542818:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.207012Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824366789542817:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.207017Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.207689Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.209329Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824366789542821:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:06.270480Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824366789542872:2468] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[1,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,1000,0,0.5] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,100,0] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,1000000,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,100,0.5] [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFirstLevelVariants[10,true,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 10346, MsgBus: 9434 2025-10-24T16:22:00.223526Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824342681197861:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:00.223551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00126e/r3tmp/tmpmOjeBg/pdisk_1.dat 2025-10-24T16:22:00.269148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 10346, node 1 2025-10-24T16:22:00.292070Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:00.296575Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824342681197836:2081] 1761322920223323 != 1761322920223326 2025-10-24T16:22:00.318786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:00.318799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:00.318801Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:00.318844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:00.327590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:00.327873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:00.328897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9434 TClient is connected to server localhost:9434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:00.400408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:00.454379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:00.904655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824342681198516:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.904684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.904863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824342681198526:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.904878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.904923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824342681198528:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.904935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.969273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:01.026681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:01.026984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:01.027060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:01.027090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:01.027119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:01.027293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:01.027326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:01.027348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:01.027478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:01.027508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:01.027531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:01.027682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:01.027711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824342681198687:2331];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:01.032943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688:2332];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:01.032984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688:2332];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:01.033043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688:2332];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:01.033061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688:2332];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:01.033217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688:2332];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:01.033234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688:2332];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:01.033250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824342681198688 ... 
nored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378409Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378492Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378562Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378648Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true 
ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378666Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378773Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378790Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.378877Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", 
{"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:06.384378Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824368936976818:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.384396Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.384490Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824368936976823:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.384498Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824368936976824:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.384506Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.385204Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.392726Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824368936976827:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:06.449953Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824368936976878:2683] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 31384, MsgBus: 13565 2025-10-24T16:21:59.636592Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824338794666404:2164];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:59.636621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f58/r3tmp/tmpq0p7gI/pdisk_1.dat 2025-10-24T16:21:59.774974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:59.775031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:59.780047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:59.784071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:59.792961Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824338794666277:2081] 1761322919634157 != 1761322919634160 2025-10-24T16:21:59.795945Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31384, node 1 2025-10-24T16:21:59.805783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:59.805794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:59.805796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:59.805830Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13565 TClient is connected to server localhost:13565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:59.976658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:59.980636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:00.022631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:00.357371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824343089634255:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.357398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.357491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824343089634265:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.357500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.432926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:00.445069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.445118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.445171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.445192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.445215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.445237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.445256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.445280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.445300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:00.445322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:00.445342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:00.445361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:00.445381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824343089634316:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:00.449820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:00.449838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:00.449853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:00.449859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:00.449894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:00.449900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:00.449913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:00.449920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:00.449929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:00.449935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;ev ... 
MNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:06.378100Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:06.378113Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:06.378118Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:06.378125Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:06.378130Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:06.378145Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:06.378150Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:06.378158Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:06.378161Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:06.382078Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824366811464885:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=127312411850064;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322926381;max=18446744073709551615;plan=0;src=[7:7564824362516497206:2145];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.383065Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.383084Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.383087Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: 
ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:06.390297Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811464957:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.390315Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.390402Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811464960:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.390411Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.392333Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.394785Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:06.404692Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811464989:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.404715Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.404831Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811464992:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.404837Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.407220Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.409265Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:22:06.414356Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811465022:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.414388Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.414447Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811465027:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.414555Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824366811465028:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.414595Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.415199Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.421234Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824366811465031:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:06.491277Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824366811465082:2442] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-CSV >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,10,100,0] >> IndexBuildTestReboots::BaseCase [GOOD] >> IndexBuildTestReboots::BaseCaseUniq >> KqpOlapJson::FilterVariantsCount[2,false,1,10,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,0,0.5] >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-CSV ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[1,false,1,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12677, MsgBus: 21628 2025-10-24T16:21:53.534664Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824310217517262:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:53.534819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001333/r3tmp/tmpsQlN2t/pdisk_1.dat 2025-10-24T16:21:53.609329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:53.617233Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:53.617496Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824310217517233:2081] 1761322913533993 != 1761322913533996 TServer::EnableGrpc on GrpcPort 12677, node 1 2025-10-24T16:21:53.636578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:53.636594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:53.636596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:53.636646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:53.645111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:53.645178Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:53.646154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21628 TClient is connected to server localhost:21628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:53.725487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:53.789146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:54.137997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824314512485205:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.138036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.138131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824314512485215:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.138154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.170013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:54.182031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:54.182100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:54.182230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:54.182283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:54.182311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:54.182337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:54.182364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:54.182390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:54.182416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:54.182477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:54.182504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:54.182530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:54.182556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824314512485268:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:54.183429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:54.183445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:54.183462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:54.183468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:54.183490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:54.183500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:54.183511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:54.183521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:54.183530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:54.183539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:54.183546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... 
re/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:05.457061Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:05.463292Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824363730315017:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.463318Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.463364Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824363730315019:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.463369Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.466225Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:05.468250Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:05.473635Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824363730315050:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.473657Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.473701Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824363730315056:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.473712Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824363730315055:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.473717Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:05.474334Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:05.481340Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824363730315059:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:22:05.551831Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824363730315110:2460] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:05.572644Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:22:05.597984Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:22:05.621301Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:06.024128Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:22:06.429146Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:06.429163Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:06.429169Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:06.429342Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925474;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:06.429373Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925488;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:06.429393Z node 7 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322925502;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:06.429501Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824363730314881:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322925516;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322925516 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 8478, MsgBus: 15902 2025-10-24T16:21:58.820176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824332033377400:2144];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:58.820244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001272/r3tmp/tmpJ3DArz/pdisk_1.dat 2025-10-24T16:21:58.852950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:58.861745Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824332033377285:2081] 1761322918819276 != 1761322918819279 2025-10-24T16:21:58.864545Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: 
Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8478, node 1 2025-10-24T16:21:58.869641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:58.869653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:58.869654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:58.869684Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15902 TClient is connected to server localhost:15902 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:58.923513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:58.923545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:58.924695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:58.929368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:59.130080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:59.426056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824336328345261:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.426082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.429323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824336328345271:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.429342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.524727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:59.562389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.562578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.562578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.562633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.562653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.562670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.562765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:59.562781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:59.562904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:59.562919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:59.562935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 
2025-10-24T16:21:59.562951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.563005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.563016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:59.563031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:59.563033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.563056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.563081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:59.563110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:59.563116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336328345383:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:59.563135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:59.563158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824336328345384:2324];tablet_id=7207518622403789 ... 
:22:06.455382Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.455569Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.455569Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.455718Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.455832Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" 
DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.455904Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.456016Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.456062Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:06.456174Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { 
SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:22:06.460955Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367514871208:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.460971Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.460996Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367514871215:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.461001Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.461038Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367514871213:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.461744Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.468472Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824367514871217:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:06.554589Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824367514871268:2686] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:6/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,false,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 25516, MsgBus: 23275 2025-10-24T16:21:59.556300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824338681178634:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:59.556508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001270/r3tmp/tmpj0N9WB/pdisk_1.dat 2025-10-24T16:21:59.645059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:59.668151Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:59.675477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:59.675593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25516, node 1 2025-10-24T16:21:59.678368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:59.707621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:59.707633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:59.707635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:59.707664Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23275 2025-10-24T16:21:59.825946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:59.886303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:59.888904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:00.304947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824342976146566:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.304998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.305382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824342976146576:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.305412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.391688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:00.408701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.408776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.408822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.408851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.408879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.408903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.408931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.408955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.408976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:00.408997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:00.409027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:00.409049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:00.409072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824342976146627:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:00.410080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:00.410092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:00.410110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:00.410117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:00.410142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:00.410148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:00.410161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:00.410166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:00.410174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:00.410179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:00.410185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TT ... 
24T16:22:06.612452Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:06.612459Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:06.612465Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:06.612473Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:06.612479Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:06.612495Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:06.612502Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:06.612514Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:06.612519Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:06.614173Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824365216672610:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=20071372645936;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322926614;max=18446744073709551615;plan=0;src=[7:7564824365216672252:2151];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.616318Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.616335Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.616338Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:06.624334Z node 7 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.624778Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672682:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.624798Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.624910Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672694:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.624928Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.633549Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:06.646273Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.646590Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672714:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.646650Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.646763Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672726:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.646768Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.648171Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:22:06.660221Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672747:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.660242Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.660387Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672752:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.660396Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824365216672753:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.660441Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.661187Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.663504Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:06.663566Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824365216672756:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:06.756614Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824365216672807:2437] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,100,0] >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,0,0] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows |51.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24015, MsgBus: 10268 2025-10-24T16:21:59.798204Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824337359580051:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:59.798491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f55/r3tmp/tmpfNdFF5/pdisk_1.dat 2025-10-24T16:21:59.910341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:59.912241Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:59.912985Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824337359580018:2081] 1761322919797908 != 1761322919797911 TServer::EnableGrpc on GrpcPort 24015, node 1 2025-10-24T16:21:59.922337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:59.922358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:59.923332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:59.972139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:59.972151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from 
file: (empty maybe) 2025-10-24T16:21:59.972153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:59.972194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10268 TClient is connected to server localhost:10268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:00.100696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:00.151248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:00.483958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824341654547994:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.483987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.484156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824341654548004:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.484165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.532628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:00.549304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.549432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.549508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.549560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.549592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.549623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.549655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.549709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.549758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:00.549792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:00.549824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:00.549847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:00.549887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341654548057:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:00.551197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:00.551221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:00.551238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:00.551244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:00.551267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:00.551278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:00.551304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:00.551315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:00.551324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:00.551335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:00.551342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... 
scription=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:06.804396Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:06.804401Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:06.804407Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:06.804413Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:06.804428Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:06.804433Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:06.804444Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:06.804448Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:06.806097Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824367109348523:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=126649896624128;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322926806;max=18446744073709551615;plan=0;src=[7:7564824367109348181:2165];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.807473Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.807488Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:06.807491Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:06.815045Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348597:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.815064Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.815142Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348600:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.815148Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.817087Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.819330Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:06.835972Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348629:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.836000Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.836229Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:06.836346Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348636:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.836370Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.837703Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:22:06.843571Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348662:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.843600Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.843617Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348667:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.843645Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824367109348669:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.843651Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.844319Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.846017Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824367109348670:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:06.933057Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824367109348722:2437] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[1,1,1],\"size\":[6,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[3,1,1]},\"columns\":{\"accessor\":[1],\"size\":[15],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[3]},\"o_size\":672,\"h_size\":760,\"c_size\":280}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-COLUMNSHARD-CSV [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-ARROW >> KqpOlapJson::FilterVariants[1,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,0,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,100,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,1000000,0] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,1000000,0] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,10,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[10,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,100,0.5] >> KqpOlapJson::SimpleExistsVariants[1,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,true,1,0,0,0.5] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,100,0] >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-CSV [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> KqpOlapJson::SimpleVariants[10,false,1,1000,1000000,0] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,1000,1000000,0.5] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariants[1,false,1,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 11369, MsgBus: 15338 2025-10-24T16:21:59.067551Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824336435990539:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:59.068982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132f/r3tmp/tmpomni0b/pdisk_1.dat 2025-10-24T16:21:59.138758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:59.147090Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:59.147451Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824336435990511:2081] 1761322919067320 != 1761322919067323 TServer::EnableGrpc on GrpcPort 11369, node 1 2025-10-24T16:21:59.177507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:59.177519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:59.177521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:59.177564Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:21:59.177619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:59.177638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:59.178741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15338 TClient is connected to server localhost:15338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:21:59.347035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:59.360577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:59.797450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824336435991186:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.797479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.797530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824336435991196:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.797535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.908849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:59.942419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.942763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.942853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.942876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.942983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.943005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:59.943028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:59.943049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:59.943577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:59.943677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:59.943786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:59.943808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:59.943829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336435991251:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:59.944866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:59.944880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:59.944894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:59.944901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:59.944924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:59.944950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:59.944967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:59.944976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:59.944984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:59.944993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:59.945000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... 
ract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:07.087723Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.087732Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:07.087736Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:07.089167Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824371955919627:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=53150121029584;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322927089;max=18446744073709551615;plan=0;src=[7:7564824367660951967:2150];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.090727Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.090745Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.090748Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:07.096157Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919699:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.096175Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.096293Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919701:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.096303Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.098411Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:07.104693Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:07.109905Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919731:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.109941Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.110048Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919733:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.110070Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.113119Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:07.119313Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:07.125063Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919764:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.125084Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.125102Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919769:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.125114Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371955919771:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.125118Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.126000Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:07.134395Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824371955919773:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:07.211159Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824371955919824:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a\":\"a2\"}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".c") = "1" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".e.\"c.a\"") = "2" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:1/0/0 HEADER:0/0/0 2025-10-24T16:22:07.702349Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,100,0.5] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-CSV [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[10,false,1024,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1144, MsgBus: 11093 2025-10-24T16:21:54.190618Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824314131366553:2162];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:54.190641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-10-24T16:21:54.194693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001331/r3tmp/tmphk3MRC/pdisk_1.dat 2025-10-24T16:21:54.229790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:54.229818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:54.230726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:54.234973Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:54.235146Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824314131366428:2081] 1761322914188797 != 1761322914188800 TServer::EnableGrpc on GrpcPort 1144, node 1 2025-10-24T16:21:54.248033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:54.248047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:54.248049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:54.248086Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11093 TClient is connected to server localhost:11093 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:21:54.325185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:54.338929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:21:54.569091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824314131367099:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.569120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.569240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824314131367109:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.569256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:54.601298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:54.622483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:54.622528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:54.622556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:54.622587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:54.622605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:54.622626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:54.622643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:54.622662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:54.622686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:54.622703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:54.622717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:54.622733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:54.622749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824314131367226:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:54.623131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:54.623141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:54.623150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:54.623153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:54.623164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:54.623170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:54.623178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:54.623184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:54.623189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:54.623192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:54.623196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Exec ... 
5186224037894;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322926601;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322926601 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:07.511928Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:07.511931Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:07.511933Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:07.512003Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926559;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:07.512011Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926573;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:07.512016Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926587;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:07.512055Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824368460258361:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322926601;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322926601 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { 
Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:07.512253Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:07.512255Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:07.512257Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:07.512306Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926559;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:07.512311Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926573;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:07.512313Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926587;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:07.512337Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824368460258336:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322926601;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322926601 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { 
FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:07.513381Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824368460258337:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:07.513499Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824368460258337:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926559;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:07.513508Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824368460258337:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926573;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:07.513513Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824368460258337:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322926587;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:07.513559Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824368460258337:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322926601;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322926601 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-YDB_VALUE >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> KqpAcl::ReadSuccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[10,true,1,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 28568, MsgBus: 31046 2025-10-24T16:21:59.969036Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824337338499685:2160];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:59.969054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00126f/r3tmp/tmpI7OmQr/pdisk_1.dat 2025-10-24T16:22:00.074587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:00.074615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:00.075534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:00.082661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:00.083391Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:00.083579Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824337338499562:2081] 1761322919967230 != 1761322919967233 TServer::EnableGrpc on GrpcPort 28568, node 1 2025-10-24T16:22:00.090608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:00.090616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:00.090618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:00.090649Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31046 TClient is connected to server localhost:31046 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:22:00.135177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:00.140093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:00.381306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:00.508298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824341633467533:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.508338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.508465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824341633467543:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.508480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.567451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:00.611548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.611965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.612021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.612039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.612056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.612210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.612227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.612245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.612246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.612256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.612296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-10-24T16:22:00.612317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.612336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.612355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.612535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.612547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.612560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:00.612574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:00.612734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:00.612748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341633467657:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:00.612827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824341633467658:2324];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV ... 
ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.527995Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528133Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528268Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528308Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528410Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528424Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528538Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528664Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:07.528718Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { 
Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:07.534053Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371849374671:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.534087Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.534181Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371849374676:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.534200Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824371849374677:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.534216Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.534887Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:07.539687Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824371849374680:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:07.618863Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824371849374731:2688] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:4/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[1,true,1,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 63273, MsgBus: 20661 2025-10-24T16:22:01.370889Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824345754003907:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:01.370918Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132a/r3tmp/tmpQm7NyC/pdisk_1.dat 2025-10-24T16:22:01.521418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:01.521444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:01.522352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:01.541310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:01.546149Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:01.546330Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824345754003881:2081] 1761322921370246 != 1761322921370249 TServer::EnableGrpc on GrpcPort 63273, node 1 2025-10-24T16:22:01.569943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:01.569955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:01.569957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:01.570002Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20661 TClient is connected to server localhost:20661 WaitRootIsUp 'Root'... 
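For reference while reading this output: the EXECUTE: statements scattered through the KqpOlapJson test logs above form one repeating scenario. A minimal YQL sketch of that scenario, assembled from the statements exactly as they appear in this log (parameter values such as SPARSED_DETECTOR_KFF, COLUMNS_LIMIT and FORCE_SIMD_PARSING vary per test parametrization; the values below are taken from one of the logged runs), looks like this:

-- Create a column-store table with a JsonDocument column (as logged after "EXECUTE: CREATE TABLE").
CREATE TABLE `/Root/ColumnTable` (
    Col1 Uint64 NOT NULL,
    Col2 JsonDocument,
    PRIMARY KEY (Col1)
) PARTITION BY HASH(Col1)
WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1);

-- Switch the table to the SIMPLE scan reader policy.
ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
    SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`);

-- Store Col2 with the SUB_COLUMNS data accessor and the JSON_SCANNER extractor.
ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
    SET (ACTION=ALTER_COLUMN, NAME=Col2,
         `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`,
         `SCAN_FIRST_LEVEL_ONLY`=`false`,
         `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
         `FORCE_SIMD_PARSING`=`true`,
         `COLUMNS_LIMIT`=`1`,
         `SPARSED_DETECTOR_KFF`=`10`,
         `MEM_LIMIT_CHUNK`=`0`,
         `OTHERS_ALLOWED_FRACTION`=`0.5`);

-- Load a few JsonDocument rows.
REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
    (1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')),
    (2u, JsonDocument('{"a" : "a2"}')),
    (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')),
    (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}'));

-- Filter on a JSON path; the log compares the result against [[1u]].
PRAGMA OptimizeSimpleILIKE;
PRAGMA AnsiLike;
SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2";
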
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:01.720403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:01.726431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:02.377876Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:02.479868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824350048971864:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.479969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.480159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824350048971874:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.480174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.480270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824350048971876:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.480279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.572445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:02.590191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.590543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.596143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.596184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.596204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:02.596229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.596257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.596278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.596309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.596341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:02.596364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:02.596385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:02.596407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350048971929:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:02.598217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:02.598232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:02.598245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:02.598250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:02.598285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:02.598291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:02.598301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:02.598306Z node 1 :TX_COLUMNSH ... 
recatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:07.549653Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:07.549656Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:07.549663Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:07.549671Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:07.549680Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:07.549688Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.549695Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:07.549698Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:07.551238Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824370533314304:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=19745013921040;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322927551;max=18446744073709551615;plan=0;src=[6:7564824370533313940:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.552924Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.552938Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.552939Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:07.564392Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314376:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.564414Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.564553Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314379:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.564561Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.566021Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:07.567828Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:07.572194Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314408:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.572211Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.572340Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314410:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.572348Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.575192Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:07.580594Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:07.584798Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314441:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.584819Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314446:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.584826Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.584900Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370533314449:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.584916Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.585569Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:07.588569Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824370533314448:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:07.639953Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824370533314501:2437] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[1,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1152, MsgBus: 13657 2025-10-24T16:22:01.510417Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824344508779876:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:01.511167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00126b/r3tmp/tmpCdAT0C/pdisk_1.dat 2025-10-24T16:22:01.626881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:01.630287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:01.630329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:01.631749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:01.662457Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1152, node 1 2025-10-24T16:22:01.694681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:01.694694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:01.694696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:01.694735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13657 TClient is connected to server localhost:13657 WaitRootIsUp 'Root'... 
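The "schema_will_not_be_ignored" diffs logged above echo the ALTER_COLUMN settings back as SubColumns protobuf fields. The annotated sketch below repeats the ALTER statement from the sketch earlier in this log and, in comments, notes which diff field each setting surfaces as; the mapping is inferred only from the pairs visible in this log (for example, the ALTER with `MEM_LIMIT_CHUNK`=`0` is followed by a diff containing ChunkMemoryLimit: 0), not from any external documentation:

ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
    SET (ACTION=ALTER_COLUMN, NAME=Col2,
         `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,  -- DataAccessorConstructor { ClassName: "SUB_COLUMNS" }
         `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`,            -- DataExtractor { ClassName: "JSON_SCANNER" }
         `SCAN_FIRST_LEVEL_ONLY`=`false`,                       -- JsonScanner { FirstLevelOnly: false }
         `FORCE_SIMD_PARSING`=`true`,                           -- JsonScanner { ForceSIMDJsonParsing: true }
         `COLUMNS_LIMIT`=`1`,                                   -- ColumnsLimit: 1
         `SPARSED_DETECTOR_KFF`=`10`,                           -- SparsedDetectorKff: 10
         `MEM_LIMIT_CHUNK`=`0`,                                 -- ChunkMemoryLimit: 0
         `OTHERS_ALLOWED_FRACTION`=`0.5`);                      -- OthersAllowedFraction: 0.5

Note that in the runs above an UPSERT_OPTIONS-only change (scan reader policy) is logged as "schema_will_be_ignored", while the column-level change is logged as "schema_will_not_be_ignored" and bumps the schema version.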
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:01.837954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:01.840976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:01.884623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:02.468728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824348803747782:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.468766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.472064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824348803747792:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.472100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.515922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:02.570577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:02.587703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.587813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.587926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.587953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.588011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:02.588032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.588055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.588125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.588333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.588354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:02.588374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:02.588394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:02.588441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824348803747854:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:02.596472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:02.596492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:02.596512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:02.596517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:02.596545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:02.596550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:02.596563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:02.596568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:02.596576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:02.596582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Sy ... 
E=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:22:07.594258Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:07.594266Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:07.594273Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:07.594281Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:07.594288Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:07.594297Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:07.594311Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:07.594319Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.594330Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:07.594338Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:07.596041Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824370050055456:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=55056087578016;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322927595;max=18446744073709551615;plan=0;src=[6:7564824370050055096:2153];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.598511Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.598524Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:07.598527Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:07.604292Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055529:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.604314Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.604367Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055532:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.604377Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.605684Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:07.610451Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:07.616111Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055561:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.616130Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.616170Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055564:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.616173Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.617870Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:07.623355Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:07.629907Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055594:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.629940Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.629942Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055599:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.629990Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824370050055601:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.629997Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.630812Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:07.638063Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824370050055602:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:07.696966Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824370050055654:2437] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1024,0,0,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,100,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,100,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,100,0.5] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpBoolColumnShard::TestFilterCompare-SCAN_QUERY-DATASHARD-CSV [GOOD] Test command err: Trying to start YDB, gRPC: 11941, MsgBus: 28398 2025-10-24T16:22:00.933192Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824340538599121:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:00.933208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132b/r3tmp/tmpuCaCdN/pdisk_1.dat 2025-10-24T16:22:01.084172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:01.095633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:01.095656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:01.100576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:01.127903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824340538599092:2081] 1761322920931729 != 1761322920931732 2025-10-24T16:22:01.129607Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11941, node 1 2025-10-24T16:22:01.152034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:01.152047Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:01.152049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:01.152080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28398 TClient is connected to server localhost:28398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:01.266997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:01.340923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, b Bool, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:01.851792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824344833567071:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:01.851820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:01.852176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824344833567081:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:01.852190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.939177Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:02.013147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:02.057867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.058060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.058198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.058217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.058331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:02.058348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.058368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.058385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.058402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.058685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:02.058705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:02.058833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:02.058851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349128534442:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:02.065247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:02.065265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:02.065282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:02.065288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:02.065521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:02.065525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:02.065537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:02.065542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:02.065551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:02.065557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPorti ... 7], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.875423Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.876017Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:06.881628Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824369023973365:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:06.948440Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824369023973416:2401] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:06.978419Z node 6 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322927021, txId: 281474976715661] shutting down 2025-10-24T16:22:07.008381Z node 6 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322927049, txId: 281474976715663] shutting down 2025-10-24T16:22:07.040260Z node 6 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322927084, txId: 281474976715665] shutting down 2025-10-24T16:22:07.068045Z node 6 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322927112, txId: 281474976715667] shutting down Trying to start YDB, gRPC: 21610, MsgBus: 13247 2025-10-24T16:22:07.401140Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7564824372579034829:2149];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:07.401271Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132b/r3tmp/tmpz3jSV7/pdisk_1.dat 2025-10-24T16:22:07.404449Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:07.413982Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:07.414013Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:07.414637Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:07.417259Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21610, node 7 2025-10-24T16:22:07.423988Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:07.424003Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:07.424005Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:07.424054Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13247 TClient is connected to server localhost:13247 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:07.495800Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:07.496920Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:07.669499Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:07.837398Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372579035380:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.837447Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.838548Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372579035400:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.838564Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.838956Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:07.862902Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372579035492:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.862925Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.863063Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372579035497:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.863072Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372579035498:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.863087Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.863780Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:07.866197Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:22:07.866257Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824372579035501:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:07.923959Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824372579035552:2402] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:07.965698Z node 7 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322928008, txId: 281474976715661] shutting down 2025-10-24T16:22:08.009587Z node 7 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322928050, txId: 281474976715663] shutting down 2025-10-24T16:22:08.049555Z node 7 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322928092, txId: 281474976715665] shutting down 2025-10-24T16:22:08.087809Z node 7 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322928127, txId: 281474976715667] shutting down >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,1000000,0.5] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,10,1000000,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,1000000,0.5] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,0,0] [GOOD] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,0,0.5] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0.5,BLOOM_FILTER] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,0,1000000,0] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,100,0.5] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-CSV >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,0,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpBoolColumnShard::TestJoinByBool-EXECUTE_QUERY-COLUMNSHARD-CSV [GOOD] Test command err: Trying to start YDB, gRPC: 21637, MsgBus: 32334 2025-10-24T16:22:00.240446Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824339796272089:2165];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:00.240482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00132d/r3tmp/tmpd6yd4I/pdisk_1.dat 2025-10-24T16:22:00.283595Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21637, node 1 2025-10-24T16:22:00.300100Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:00.316236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:00.316247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:00.316249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:00.316281Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:00.343556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:00.343580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:00.344778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32334 TClient is connected to server localhost:32334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:00.402856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:00.406479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:00.499613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, b Bool, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:00.976693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339796272634:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:00.976719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:00.980011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339796272644:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:00.980034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.137181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:01.156332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:01.156385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:01.156428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:01.156441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:01.156454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:01.156471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:01.156484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:01.156501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:01.156513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:01.156528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:01.156540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:01.156555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:01.156567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824344091239993:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:01.160334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:01.160350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:01.160394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:01.160401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:01.160422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:01.160427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:01.160440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:01.160445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:01.160454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:01.160484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:01.160491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstr ... 
RD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:07.891728Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:07.891750Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:07.891770Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:07.891793Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:07.891816Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:07.891838Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:07.891871Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.892884Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:07.892894Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:07.892918Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:07.892924Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:07.892942Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:07.892946Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:07.892955Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:07.892960Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:07.892967Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:07.892972Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:07.892978Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:22:07.892982Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:22:07.893003Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:22:07.893008Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:22:07.893019Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:07.893022Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:07.893027Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:07.893031Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:07.893036Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:07.893041Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:07.893054Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:07.893058Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.893068Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:07.893071Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:07.895774Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824372445453803:2340];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710659;this=124901848443728;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=1761322927895;max=18446744073709551615;plan=0;src=[7:7564824372445453250:2154];cookie=22:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:07.897338Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:07.897355Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:07.897357Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:07.933609Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372445453865:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.933632Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.933685Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372445453871:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.933697Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824372445453870:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:07.933712Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.934367Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:07.936759Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824372445453874:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-10-24T16:22:08.035727Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824376740421221:2446] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:08.436175Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,1000000,0] >> KqpOlapJson::FilterVariants[10,false,0,10,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,0,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,0,0] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,1000000,0] >> KqpOlap::SelectLimit1ManyShards [GOOD] >> KqpOlap::SimpleCount >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,1000000,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,0,1000,0.5,BLOOM_FILTER] [GOOD] Test command err: Trying to start YDB, gRPC: 8593, MsgBus: 23610 2025-10-24T16:21:53.049404Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824310169694954:2209];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:53.049444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001339/r3tmp/tmpdzpmdJ/pdisk_1.dat 2025-10-24T16:21:53.097775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:53.098069Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:53.098231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824310169694764:2081] 1761322913047385 != 1761322913047388 TServer::EnableGrpc on GrpcPort 8593, node 1 2025-10-24T16:21:53.105769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:53.105782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:53.105784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:53.105813Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23610 TClient is connected to server localhost:23610 2025-10-24T16:21:53.156005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-10-24T16:21:53.156034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:53.157217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:53.170902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:53.173699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:53.275648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:53.722415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310169695439:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:53.722446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:53.722593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310169695449:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:53.722605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:53.722640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824310169695451:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:21:53.722650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:53.777185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:53.800287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.800443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.800507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:53.800589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.800606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.800624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.800642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:53.800645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:53.800658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:53.800658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:53.800709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-10-24T16:21:53.800727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:53.800779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:53.800786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:53.800805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:53.800807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:53.800826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824310169695527:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:53.800829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:53.800851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824310169695512:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalize ... 
RD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824373423243058:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322927525;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:08.452279Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824373423243058:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322927665;tx_id=281474976710667;;new_schema=Id: 0 SinceStep: 1761322927665 SinceTxId: 281474976710667 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 4 Version: 4 Indexes { Id: 3 Name: "a_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:08.452694Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:08.452702Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:08.452707Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=2; 2025-10-24T16:22:08.452713Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=3;to=4; 2025-10-24T16:22:08.452808Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322927497;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:08.452816Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322927511;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:08.452819Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322927525;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:08.452867Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322927665;tx_id=281474976710667;;new_schema=Id: 0 SinceStep: 1761322927665 SinceTxId: 281474976710667 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 4 Version: 4 Indexes { Id: 3 Name: "a_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:08.453427Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824373423243058:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:08.453477Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824373423243043:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:08.460396Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;task_id=965b3e96-b0f511f0-9c8ed6d6-e92b59be;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:08.460561Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;task_id=965b5426-b0f511f0-a5c43fea-2bc4e3fd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a1" ORDER BY Col1; COMPARE: [[1u;["{\"a.b.c\":\"a1\"}"]]] OUTPUT: [[1u;["{\"a.b.c\":\"a1\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=DROP_INDEX, NAME=a_index) 2025-10-24T16:22:08.812452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:08.814853Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=5;to_version=6;diff=Version: 6 DefaultCompression { } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:08.814876Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710672;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=5;to_version=6;diff=Version: 6 DefaultCompression { } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "1a1" ORDER BY Col1; COMPARE: [[11u;["{\"a.b.c\":\"1a1\"}"]]] OUTPUT: [[11u;["{\"a.b.c\":\"1a1\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_INDEX, NAME=b_index, TYPE=CATEGORY_BLOOM_FILTER, FEATURES=`{"column_name" : "Col2", "false_positive_probability" : 0.01}`) 2025-10-24T16:22:08.922967Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:08.930162Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=6;to_version=7;diff=Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:08.930529Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=6;to_version=7;diff=Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") = "1b4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; 
PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a4" ORDER BY Col1; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d111\"") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[1,false,1,1000,100,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 26228, MsgBus: 20257 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001326/r3tmp/tmpz4Vqkw/pdisk_1.dat 2025-10-24T16:22:03.580849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:03.580890Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824352501066185:2081] 1761322923498824 != 1761322923498827 2025-10-24T16:22:03.580913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:03.583382Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26228, node 1 2025-10-24T16:22:03.589846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:03.589858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:03.589859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:03.589906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:03.612082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:03.612108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:03.613074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20257 TClient is connected to server localhost:20257 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:03.652883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:03.864349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:04.006882Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824356796034164:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.006910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.006993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824356796034174:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.007005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:04.056564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:04.071414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:04.071509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:04.071559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:04.071580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:04.071610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:04.071630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:04.071652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:04.071671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:04.071700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:04.071732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:04.071753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:04.071781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:04.071801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824356796034227:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:04.072636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:04.072650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:04.072662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:04.072666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:04.072711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:04.072715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:04.072727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:04.072731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:04.072737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:04.072741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:04.072748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;pr ... 
ription=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:08.632099Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:22:08.632104Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:22:08.632128Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:22:08.632136Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:22:08.632150Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:08.632155Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:08.632160Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:08.632165Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:08.632171Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:08.632176Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:08.632187Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:08.632194Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:08.632203Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:08.632207Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:08.636849Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824375071016805:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=21288966837616;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322928636;max=18446744073709551615;plan=0;src=[7:7564824375071016437:2147];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:08.638851Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:08.638868Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:08.638873Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:08.648344Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824375071016879:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.648369Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.648531Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824375071016882:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.648538Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.652340Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:08.662152Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:08.676154Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:08.676772Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824375071016911:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.676852Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.676964Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824375071016923:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.676968Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.681769Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:22:08.694885Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824375071016805:2323];path_id=1000000889;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:08.694901Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824375071016805:2323];path_id=1000000889;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:08.694905Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564824375071016805:2323];path_id=1000000889;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:08.694931Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824375071016805:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=968036ce-b0f511f0-882b8900-f1eec5d3;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:08.694976Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824375071016805:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;tablet_id=72075186224037888;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 
72075186224037888 in longTx ydb://long-tx/01k8bge2hj724c34hedrs5wyxc?node_id=7 >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-CSV [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[true,false,0,0,0,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[true,false,0,0,0,0,BLOOM_FILTER] >> KqpOlapJson::SimpleExistsVariants[1,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,1000000,0.5] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,10,1000,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,10,1000000,0.5] >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,1000000,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,100,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,100,0.5] >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,1000000,0.5] >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,1000000,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[10,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 11421, MsgBus: 21606 2025-10-24T16:22:02.313864Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824350375076392:2261];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:02.314103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001329/r3tmp/tmpe6risV/pdisk_1.dat 2025-10-24T16:22:02.414575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:02.414601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:02.415784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:02.417191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:02.417320Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11421, node 1 2025-10-24T16:22:02.423042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-10-24T16:22:02.423052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:02.423054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:02.423092Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21606 TClient is connected to server localhost:21606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:02.500627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:02.692442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:02.872141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824350375076813:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.872164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.872399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824350375076823:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.872415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:02.935453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:02.970086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.970155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.970199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.970223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.970241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:02.970268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.970286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.970307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.970330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.970347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:02.970365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:02.970382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:02.970401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564824350375076963:2327];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:02.970923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.970952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.975963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.976019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.976041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:02.976067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.976087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.976108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.976127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824350375076954:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.976149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224 ... 
olumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.740400Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.740513Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.740626Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.740735Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false 
ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.740852Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.740983Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:09.741098Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:22:09.753262Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824382042909125:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.753284Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.753393Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824382042909130:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.753400Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824382042909131:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.753445Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.754207Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:09.756723Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:09.756804Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824382042909134:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:09.821229Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824382042909185:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:4/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[[""]];[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1,1],\"size\":[2,11],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\"],\"records\":[1,1]},\"o_size\":0,\"h_size\":381,\"c_size\":488}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1,1],\"size\":[2,2],\"key_names\":[\"\\\"b.c.d\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[1,1]},\"o_size\":0,\"h_size\":381,\"c_size\":480}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1,1,1],\"size\":[2,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\",\"\\\"c.d.e\\\"\"],\"records\":[1,1,1]},\"o_size\":0,\"h_size\":410,\"c_size\":720}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"a.b.c\\\"\"],\"records\":[1]},\"o_size\":0,\"h_size\":360,\"c_size\":240}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpBoolColumnShard::TestAggregation-EXECUTE_QUERY-DATASHARD-CSV [GOOD] Test command err: Trying to start YDB, gRPC: 30739, MsgBus: 7421 2025-10-24T16:22:00.072522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824341141864856:2081];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:00.072555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f57/r3tmp/tmpAqGAJj/pdisk_1.dat 2025-10-24T16:22:00.148017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:00.148375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:00.148448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:00.149545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:00.171352Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not 
loaded TServer::EnableGrpc on GrpcPort 30739, node 1 2025-10-24T16:22:00.201194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:00.201209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:00.201212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:00.201265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7421 TClient is connected to server localhost:7421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:00.276788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:00.281201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:00.412803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, b Bool, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:00.649665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824341141865480:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.649688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.649848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824341141865490:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.649855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.827984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:00.868991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:00.869493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:00.870045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:00.870070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:00.870093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:00.870238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:00.870271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:00.870301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:00.870450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:00.870479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:00.870503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:00.870668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:00.870696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824341141865545:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:00.873968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:00.873984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:00.874000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:00.874005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:00.874025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:00.874029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:00.874042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:00.874046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:00.874053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:00.874056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:00.874062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract ... to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.158087Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:09.182689Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824381072963253:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.182712Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.182839Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824381072963258:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.182846Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824381072963259:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.182900Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:09.183641Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:09.185826Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:22:09.185881Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824381072963262:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:09.243289Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824381072963313:2404] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 27819, MsgBus: 26000 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f57/r3tmp/tmpbxA2jU/pdisk_1.dat 2025-10-24T16:22:09.716202Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:09.716240Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:09.721473Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:09.721496Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:09.722465Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:09.733435Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27819, node 7 2025-10-24T16:22:09.752216Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:09.752227Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:09.752229Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:09.752267Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26000 TClient is connected to server localhost:26000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:22:09.868413Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:09.884413Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:09.893952Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:10.163418Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824383536124700:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.163468Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.165215Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:10.166486Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824383536124720:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.166567Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.209077Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824383536124811:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.209102Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.209235Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824383536124817:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.209242Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.209248Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824383536124816:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.210054Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:10.212608Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:22:10.212673Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824383536124820:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:10.273000Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824383536124871:2406] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpOlapJson::SimpleVariants[10,false,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[1,true,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 17632, MsgBus: 24657 2025-10-24T16:22:03.361858Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824353596347163:2143];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:03.361932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001327/r3tmp/tmpGXLiVP/pdisk_1.dat 2025-10-24T16:22:03.419327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:03.425938Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17632, node 1 2025-10-24T16:22:03.452096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:03.452110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:03.452112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:03.452147Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:03.464998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:03.465024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:03.466065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24657 TClient is connected to server localhost:24657 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:03.541694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:03.548062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:03.695526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:03.896166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824353596347732:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.896199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.899457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824353596347742:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.899482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.927676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:03.937750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:03.937842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:03.937890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:03.937925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:03.937946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:03.937970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:03.937995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:03.938015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:03.938033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:03.938059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:03.938084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:03.938114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:03.938139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824353596347795:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:03.938670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:03.938687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:03.938700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:03.938705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:03.938737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:03.938747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:03.938760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:03.938765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:03.938778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:03.938783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:03.938795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSche ... 
ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:10.140627Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:10.140634Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:10.140640Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:10.140646Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:10.140652Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:10.140667Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:10.140674Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:10.140685Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:10.140689Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:10.142916Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824386287192655:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=21788246346832;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322930142;max=18446744073709551615;plan=0;src=[6:7564824377697257688:2150];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:10.149310Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:10.149331Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:10.149334Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, 
`SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:10.164785Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192733:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.164807Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.164883Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192735:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.164888Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.167412Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:10.176098Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:10.195004Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:10.195355Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192765:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.195374Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.195578Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192777:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.195595Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.203077Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:10.225603Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192798:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.225629Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.227946Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192803:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.227975Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824386287192804:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.228033Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.232932Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:10.235403Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:10.235523Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824386287192807:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:10.296754Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824386287192858:2447] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::FilterVariantsCount[2,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,1000000,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,1,0,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,0,0] >> KqpOlapJson::SimpleExistsVariants[10,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,1000,0,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,1000,0,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,1000000,0] >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,0,0.5] [GOOD] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,1000,0] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleVariants[10,false,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 5156, MsgBus: 9959 2025-10-24T16:22:02.945849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824351353512838:2071];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:02.945875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001328/r3tmp/tmpzOHeu2/pdisk_1.dat 2025-10-24T16:22:03.003841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:03.015719Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5156, node 1 2025-10-24T16:22:03.027161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:03.027172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:03.027173Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:03.027220Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9959 TClient is connected to server localhost:9959 2025-10-24T16:22:03.064355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:03.064383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:03.065527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:03.080878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:03.085724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:03.224402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:03.465784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824355648480775:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.465806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.466215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824355648480785:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.466229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:03.541243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:03.586005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:03.586084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:03.586150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:03.586174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:03.586187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:03.586197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:03.586212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:03.586261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:03.586268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:03.586294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:03.586300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:03.586321Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:03.586330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:03.586347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:03.586360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:03.586370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:03.586392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:03.586393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:03.586412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:03.586418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:03.586449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564824355648480902:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:03.586453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824355648480901:2325];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline ... 
d: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661145Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661308Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661308Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661447Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661450Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 
3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661579Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661581Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661707Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:10.661707Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false 
ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:10.672208Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824382608974734:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.672232Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.672315Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824382608974739:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.672320Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824382608974740:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.672370Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.674039Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:10.677322Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824382608974743:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:10.779307Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824382608974794:2694] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::RestoreFullJsonVariants[10,false,1024,0,0,0.5] >> KqpOlapJson::FilterVariants[2,true,0,1000,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1024,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1024,0,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,0,0.5] >> KqpOlapJson::FilterVariantsCount[1,false,0,10,100,0] >> KqpAcl::ReadSuccess [GOOD] >> KqpAcl::WriteSuccess >> KqpOlapJson::CompactionVariants[10,false,1,0,1000000,0] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,0,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,1000000,0.5] >> KqpOlapJson::OrFilterVariants[1,false,1024,0,1000000,0] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,0,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,100,0] >> KqpOlapJson::FilterVariantsCount[2,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,0,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,0,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,100,0] >> KqpOlapJson::FilterVariants[10,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[10,false,0,1000,0,0] >> IndexBuildTestReboots::IndexPartitioning [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,0,0,0.5] >> KqpOlapJson::BloomMixIndexesVariants[true,false,0,0,0,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,0,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[1,true,1,0,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 61962, MsgBus: 14018 2025-10-24T16:21:58.238579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824333403644508:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:58.238623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/fxx3/000b4f/r3tmp/tmpOvFRmk/pdisk_1.dat 2025-10-24T16:21:58.267951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:58.277049Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:58.277228Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824333403644481:2081] 1761322918238455 != 1761322918238458 TServer::EnableGrpc on GrpcPort 61962, node 1 2025-10-24T16:21:58.285801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:58.285814Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:58.285816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:58.285869Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14018 TClient is connected to server localhost:14018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:58.342383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:58.342419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:58.343543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:58.348679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:58.483282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:58.603784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824333403645161:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.603814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.603935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824333403645171:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.603961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.633778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:58.641540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:58.641585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:58.641613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:58.641632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:58.641645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:58.641665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:58.641684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:58.641724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:58.641743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:58.641765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:58.641786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:58.641806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:58.641825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824333403645224:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:58.642261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:58.642280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:58.642300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:58.642304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:58.642316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:58.642319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:58.642327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:58.642330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:58.642334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:58.642337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:58.642341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... kloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.623466Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:11.636434Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824387759623181:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.636451Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.638388Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824387759623184:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.638405Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.638855Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:11.642109Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:11.672234Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824387759623214:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.672258Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.672379Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824387759623220:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.672432Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824387759623219:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.672436Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.673181Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:11.676021Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-10-24T16:22:11.676094Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824387759623223:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:22:11.728943Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824387759623274:2464] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:11.761686Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:22:11.791661Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:22:11.836273Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:11.843090Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:12.578712Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:12.578728Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:12.578737Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:12.578917Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322931627;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:12.578929Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322931655;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:12.578935Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322931669;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:12.579693Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824387759623042:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322931690;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322931690 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1024,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1024,0,100,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,1000,0,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,100,0.5] >> KqpOlap::SimpleCount [GOOD] >> KqpOlap::ScanFailedSnapshotTooOld >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-COLUMNSHARD-ARROW >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,0,0] >> KqpOlapAggregations::Aggregation_ResultT_FilterL_OrderT_Limit2 [GOOD] >> KqpOlapAggregations::Aggregation_Some >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,0,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,1,0,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[10,true,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1682, MsgBus: 1691 2025-10-24T16:22:00.238496Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824339786024243:2073];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:00.238519Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00126d/r3tmp/tmp3kdQB2/pdisk_1.dat 2025-10-24T16:22:00.282590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:00.295786Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1682, node 1 2025-10-24T16:22:00.299664Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824339786024200:2081] 1761322920238126 != 1761322920238129 2025-10-24T16:22:00.312324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:00.312335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:00.312337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:00.312377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:00.342837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:00.342867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:00.343910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1691 TClient is connected to server localhost:1691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:00.399078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:00.462749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:00.949114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339786024875:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.949250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.951109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339786024885:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.951133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.951173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824339786024888:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:00.951185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:01.115765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:01.150157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:01.150225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:01.150328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:01.150349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:01.150366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:01.150411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:01.150430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:01.150442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:01.150448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:01.150481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:01.150484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-10-24T16:22:01.150503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:01.150503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:01.150534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:01.150584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:01.150653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:01.150676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:01.150680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:01.150697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824344080992322:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:01.150703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824344080992319:2327] ... 
" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:13.515486Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824392219038907:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:13.515497Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824392219038907:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:13.515550Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824392219038907:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932572;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:13.515558Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824392219038907:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932614;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:13.515592Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824392219038907:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322932803;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322932803 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:13.515705Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[6:7564824392219038885:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:13.515708Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[6:7564824392219038885:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:13.515751Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[6:7564824392219038885:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932572;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:13.515757Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[6:7564824392219038885:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932614;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:13.515760Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824392219038886:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:13.515764Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824392219038886:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:13.515781Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[6:7564824392219038885:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322932803;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322932803 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:13.515842Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824392219038886:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932572;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:13.515862Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824392219038886:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932614;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:13.515910Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824392219038886:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322932803;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322932803 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" 
DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:13.524153Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:13.524166Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:13.524169Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:13.524290Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932572;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:13.524299Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932614;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:13.524310Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322932803;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:22:13.524391Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824392219038880:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322932817;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761322932817 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } 
NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> KqpAcl::WriteSuccess [GOOD] >> KqpAcl::FailedWriteAccessDenied >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,100,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,100,0.5] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,100,0.5] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1,10,1000,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[10,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12359, MsgBus: 14648 2025-10-24T16:22:06.579191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824367217375006:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:06.579310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:06.583367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001322/r3tmp/tmp6sbZ4C/pdisk_1.dat 2025-10-24T16:22:06.617085Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12359, node 1 2025-10-24T16:22:06.633014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:06.633024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:06.633025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:06.633061Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14648 2025-10-24T16:22:06.682956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:06.682980Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:06.683993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14648 2025-10-24T16:22:06.699864Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:06.712715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:06.716043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:06.968084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824367217375640:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.968119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.968258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824367217375650:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.968264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.013581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:07.042560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:07.042624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:07.042674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:07.042710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:07.042729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:07.042749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:07.042764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:07.042776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:07.042790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:07.042801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:07.042813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:07.042823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824371512343092:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:07.042827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:07.042836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824371512343092:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:07.042839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824371512343093:2331];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.043366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:07.043380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:07.043393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:07.043397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:07.043415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:07.043419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:07.043431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:07.043435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:07.043442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207 ... 
24T16:22:13.873433Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.873586Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.873679Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.873731Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.873849Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } 
DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.873860Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.873964Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.874006Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:13.874091Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 
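[Editor's sketch, not part of the captured test output] The schema diff applied by tx_id=281474976710660 above (DataAccessorConstructor SUB_COLUMNS with a JSON_SCANNER extractor; SparsedDetectorKff: 1000, ColumnsLimit: 1, ChunkMemoryLimit: 0, OthersAllowedFraction: 0.5, FirstLevelOnly: false, ForceSIMDJsonParsing: false) has the same shape as the EXECUTE: ALTER OBJECT statement printed later in this log for a sibling test case. The statement for this particular case is not printed in this excerpt; presumably it looked like the following reconstruction, with parameter names copied from that later EXECUTE line and values taken from the proto diff above:

    -- Reconstructed sketch only: parameter spelling follows the
    -- "EXECUTE: ALTER OBJECT" line shown further down in this log,
    -- values follow the UpsertColumns diff logged just above.
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (
        ACTION=ALTER_COLUMN, NAME=Col2,
        `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`,
        `SCAN_FIRST_LEVEL_ONLY`=`false`,
        `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
        `FORCE_SIMD_PARSING`=`false`,
        `COLUMNS_LIMIT`=`1`,
        `SPARSED_DETECTOR_KFF`=`1000`,
        `MEM_LIMIT_CHUNK`=`0`,
        `OTHERS_ALLOWED_FRACTION`=`0.5`);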
EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:13.879416Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824395360133525:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.879433Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.879549Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824395360133530:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.879554Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824395360133531:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.879567Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.880243Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:13.884571Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:13.884643Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824395360133534:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:13.956571Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824395360133585:2687] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreJsonArrayVariants[2,true,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 63463, MsgBus: 1930 2025-10-24T16:22:06.967335Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824365025602770:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:06.967355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00131e/r3tmp/tmpKco7hg/pdisk_1.dat 2025-10-24T16:22:07.017393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:07.029832Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63463, node 1 2025-10-24T16:22:07.039608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:07.039621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:07.039623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:07.039665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1930 2025-10-24T16:22:07.069819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:07.069848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:07.072118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:07.097248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:07.279958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:07.414583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824369320570698:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.414606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.414768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824369320570708:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.414777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:07.456983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:07.469396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:07.469455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:07.469498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:07.469518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:07.469544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:07.469567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:07.469591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:07.469610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:07.469631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:07.469652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:07.469672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:07.469698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:07.469719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824369320570769:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:07.470854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:07.470867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:07.470881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:07.470886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:07.470907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:07.470912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:07.470925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:07.470930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:07.470939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:07.470944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:07.470960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:22:07.470966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id= ... 
:7564824390908281538:2150];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.699975Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564824390908281914:2323];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710658;this=20528791465504;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322932699;max=18446744073709551615;plan=0;src=[6:7564824390908281538:2150];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.705350Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.705367Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.705370Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.706828Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.706837Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:12.706840Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:12.726958Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:12.727230Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282017:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.727254Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.730956Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282029:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.730982Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.733869Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:12.734011Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:12.744067Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282053:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.744093Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.745424Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282056:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.745443Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.746218Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:12.748783Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:12.748784Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('["a", {"v" : 4}, 1,2,3,4,5,6,7,8,9,10,11,12]')) 2025-10-24T16:22:12.768948Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282090:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.768983Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.769130Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282095:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.769138Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824390908282096:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.769204Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.770021Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:12.772735Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:12.772795Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824390908282099:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:12.859212Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824390908282150:2467] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] OUTPUT: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1024,0,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 22549, MsgBus: 28214 2025-10-24T16:21:58.788811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824332194626987:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:58.788831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f59/r3tmp/tmpS8yGYR/pdisk_1.dat 2025-10-24T16:21:58.823062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:58.834520Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:58.834675Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824332194626962:2081] 1761322918788694 != 1761322918788697 TServer::EnableGrpc on GrpcPort 22549, node 1 2025-10-24T16:21:58.842248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:58.842260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:58.842262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:58.842308Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28214 TClient is connected to server localhost:28214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:58.891686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:58.899590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:58.899616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:58.900669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:21:58.981630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:59.388203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824336489594934:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.388224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.388301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824336489594944:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.388316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:59.510060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:59.518393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:59.518453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:59.518494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:59.518514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:59.518537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:59.518555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:59.518572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:59.518591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:59.518612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:59.518640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:59.518670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:59.518697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:59.518728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824336489594997:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:59.519226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:59.519242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:59.519255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:59.519271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:59.519294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:59.519305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:59.519317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:59.519327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:59.519335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:59.519346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:59.519354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.179150Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:13.182408Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:13.199806Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824396312730190:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.199830Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.199893Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824396312730193:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.199901Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.201616Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:13.205215Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:13.213725Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824396312730223:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.213748Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.213825Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824396312730228:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.213837Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824396312730229:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.213849Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.215211Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:13.219529Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824396312730232:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:22:13.292732Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824396312730283:2464] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:13.339050Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:22:13.372291Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:22:13.401237Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:14.128564Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:14.128581Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:14.128587Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:14.128759Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322933181;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:14.128771Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322933209;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:14.128776Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322933230;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:14.128881Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824396312730054:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322933251;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322933251 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,1000,0] [GOOD] >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,1000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,1000000,0] >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsLocksFields >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,0,0.5] >> KqpOlapJson::FilterVariants[10,false,0,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariants[10,false,0,1000,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,0,0] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,0,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,100,0] >> KqpOlapAggregations::Aggregation_Some [GOOD] >> KqpOlapAggregations::Aggregation_Some_Null >> KqpOlapJson::OrFilterVariants[1,false,1024,10,0,0] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,0,0.5] >> KqpOlapJson::OrFilterVariants[2,true,1,10,1000000,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,1000000,0] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes >> KqpOlapJson::RestoreFullJsonVariants[10,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,0,0] >> KqpOlapJson::FilterVariants[2,true,0,1000,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1,0,0,0] >> 
KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,100,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 27128, MsgBus: 6985 2025-10-24T16:22:06.312856Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824368447240244:2192];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:06.312892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001324/r3tmp/tmppMX7mc/pdisk_1.dat 2025-10-24T16:22:06.373425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:06.374349Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27128, node 1 2025-10-24T16:22:06.392067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:06.392080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:06.392081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:06.392117Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:06.413993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:06.414024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:06.414847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6985 TClient is connected to server localhost:6985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
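The bracketed parameters in the test name, [2,false,1,1000,0,0.5], line up with the settings visible in the EXECUTE statements that follow in this block: minimum partition count, FORCE_SIMD_PARSING, COLUMNS_LIMIT, SPARSED_DETECTOR_KFF, MEM_LIMIT_CHUNK and OTHERS_ALLOWED_FRACTION. Collected into a single standalone YQL sketch (statements copied from the EXECUTE lines below; in the test run each statement is issued as a separate query, and only the grouping and trailing comments are added here), the sequence this variant drives is roughly:

    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        PRIMARY KEY (Col1)
    ) PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2);  -- first bracket parameter

    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`);

    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=ALTER_COLUMN, NAME=Col2,
             `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
             `FORCE_SIMD_PARSING`=`false`,      -- second bracket parameter
             `COLUMNS_LIMIT`=`1`,               -- third
             `SPARSED_DETECTOR_KFF`=`1000`,     -- fourth
             `MEM_LIMIT_CHUNK`=`0`,             -- fifth
             `OTHERS_ALLOWED_FRACTION`=`0.5`);  -- sixth

    REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
        (1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')),
        (2u, JsonDocument('{"a" : "a2"}')),
        (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')),
        (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}'));

    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2";  -- expected COMPARE/OUTPUT: [[1u]]
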
2025-10-24T16:22:06.472305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:06.669860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:06.736483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824368447240739:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.736510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.736691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824368447240749:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.736699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.782465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:06.796145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:06.796224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:06.796270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:06.796296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:06.796334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:06.796356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:06.796380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:06.796401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:06.796421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:06.796441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:06.796474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:06.796495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:06.796519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368447240808:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:06.800221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:06.800238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:06.800278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:06.800303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:06.800324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:06.800346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:06.800450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:06.800473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:06.800492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368447240828:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:06.800511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=720751862240378 ... 
vent=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:14.522880Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:14.524523Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824399536166682:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=88689924179088;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322934524;max=18446744073709551615;plan=0;src=[7:7564824399536166300:2147];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:14.529261Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:14.529277Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:14.529280Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:14.530511Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:14.530520Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:14.530522Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:14.550381Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:14.550792Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166786:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.550989Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.551125Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166796:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.551132Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.552236Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:14.552361Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:14.570961Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166820:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.570989Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.571125Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166823:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.571132Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.572139Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:14.576494Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:14.576658Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:14.580569Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166857:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.580582Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.580857Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166862:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.580865Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824399536166863:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.580898Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.581452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:14.589123Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824399536166866:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:14.681425Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824399536166917:2468] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:2/0/0 HEADER:0/0/0 2025-10-24T16:22:15.020325Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpAcl::FailedWriteAccessDenied [GOOD] >> KqpAcl::RecursiveCreateTableShouldSuccess >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,0,0] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,0,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,100,0] >> KqpOlapJson::BrokenJsonWriting[2,true,0,10,100,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,false,1,0,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,10,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,10,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,100,0.5] >> KqpOlapJson::FilterVariants[10,false,0,1000,0,0.5] [GOOD] >> KqpOlapJson::FilterVariants[10,false,0,1000,100,0] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1024,0,0,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,0,0] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,0,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,100,0] >> KqpOlapAggregations::Aggregation_Some_Null [GOOD] >> KqpOlapAggregations::Aggregation_Some_GroupBy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeVariants[10,false,1,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 11534, MsgBus: 5316 2025-10-24T16:22:07.461825Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824371957753955:2132];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:07.461841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00131d/r3tmp/tmpyN67w2/pdisk_1.dat 2025-10-24T16:22:07.468000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:07.498578Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7564824371957753863:2081] 1761322927461380 != 1761322927461383 2025-10-24T16:22:07.503411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:07.503439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:07.504494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:07.508240Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11534, node 1 2025-10-24T16:22:07.518106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:07.518114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:07.518116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:07.518240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5316 TClient is connected to server localhost:5316 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:22:07.563372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:07.574160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
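Same naming scheme here: the bracket [10,false,1,1000,100,0.5] maps onto the DDL in this block, and the structural differences from the suites above are the extra Col3 UTF8 column and the larger minimum partition count. A condensed YQL reconstruction follows; the CREATE TABLE is copied from the EXECUTE line below, while the ALTER OBJECT values are read back from the UpsertColumns/SubColumns diff at the end of this block, so the exact original statement may have differed in form:

    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        Col3 UTF8,
        PRIMARY KEY (Col1)
    ) PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10);

    -- Option values mirror the SubColumns settings reported in the schema diff:
    -- SparsedDetectorKff: 1000, ColumnsLimit: 1, ChunkMemoryLimit: 100,
    -- OthersAllowedFraction: 0.5, ForceSIMDJsonParsing: false.
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=ALTER_COLUMN, NAME=Col2,
             `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
             `FORCE_SIMD_PARSING`=`false`,
             `COLUMNS_LIMIT`=`1`,
             `SPARSED_DETECTOR_KFF`=`1000`,
             `MEM_LIMIT_CHUNK`=`100`,
             `OTHERS_ALLOWED_FRACTION`=`0.5`);
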
2025-10-24T16:22:07.580005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:08.163305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824376252721831:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.163370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.163499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824376252721841:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.163506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.163604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824376252721843:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.163610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.211100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:08.248970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:08.249043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:08.249087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:08.249109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:08.249129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:08.249150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:08.249171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:08.249191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:08.249211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:08.249236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:08.249257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:08.249276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:08.249296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824376252721979:2330];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:08.252676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824376252721977:2328];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:08.252697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824376252721977:2328];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:08.252732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824376252721977:2328];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:08.252754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824376252721977:2328];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:08.252776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824376252721977:2328];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:08.252799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824376252721977:2328];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_reg ... 
ersion=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422437Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422488Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422569Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422702Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false 
ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422712Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422843Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422893Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:16.422970Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : 
"value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:22:16.439290Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824408359301858:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.439308Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.439450Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824408359301863:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.439456Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824408359301864:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.439460Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.440156Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:16.443121Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:16.443180Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824408359301867:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:16.531530Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824408359301918:2690] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" ORDER BY Col1; 2025-10-24T16:22:16.620461Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:8/0/0 HEADER:0/0/0 >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,1000000,0.5] >> KqpOlapJson::CompactionVariants[10,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1,0,1000000,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,100,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::IndexPartitioning [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.739988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.740006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.740010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.740016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 
2025-10-24T16:20:33.740021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.740025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.740032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.740043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.740138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.740184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.756558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.756583Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.756673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.759041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.759096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.759123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.761276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.761403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.761509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.761560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.762392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.762426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.762614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.762622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.762647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.762655Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.762661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.762680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.763907Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.784390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.784464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.784510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.784518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.784561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.784597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.785114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.785157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.785196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.785203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.785206Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.785210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.785478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.785488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.785506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.785748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.785755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.785759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.785763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.786244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.786552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.786578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.786705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.786722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:13.922664Z node 323 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:22:13.922686Z node 323 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 23us result status StatusSuccess 2025-10-24T16:22:13.922797Z node 323 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Index" LocalPathId: 4 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:13.922842Z node 323 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:22:13.922875Z node 323 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 34us result status StatusSuccess 2025-10-24T16:22:13.922992Z node 323 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 
TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreJsonArrayVariants[1,false,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 7084, MsgBus: 29783 2025-10-24T16:22:07.696996Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824373335568681:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:07.697096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00131c/r3tmp/tmp81FDYT/pdisk_1.dat 2025-10-24T16:22:07.746760Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:07.765084Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:07.765397Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824373335568651:2081] 1761322927696101 != 1761322927696104 TServer::EnableGrpc on GrpcPort 7084, node 1 2025-10-24T16:22:07.800149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:07.800168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:07.800170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:07.800222Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:07.802804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:07.802833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-10-24T16:22:07.804484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29783 TClient is connected to server localhost:29783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:08.009171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:08.015597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:08.032449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:08.569819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824377630536622:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.569847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.570035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824377630536632:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.570045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:08.704042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:08.730839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:08.765900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:08.766000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:08.766051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:08.766076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:08.766136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:08.766161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:08.766185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:08.766208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:08.766231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:08.766253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:08.766281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:08.766305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:08.766330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824377630536694:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:08.766938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:08.766949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:08.766965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:08.766971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:08.766989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:08.767001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:08.767013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:08.767019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:08.767027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2 ... 
r_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:22:16.388325Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:16.388330Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:16.388336Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:16.388340Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:16.388347Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:16.388352Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:16.388551Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:16.388556Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:16.388566Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:16.388569Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:16.393657Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824410699608610:2325];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=125819911569072;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322936393;max=18446744073709551615;plan=0;src=[6:7564824406404640952:2155];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:16.396748Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:16.396762Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:16.396764Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:16.409948Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608683:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.409964Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.410069Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608685:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.410075Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.414846Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:16.417975Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:16.428178Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608715:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.428196Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.428263Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608717:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.428268Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.435187Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:16.438394Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('["a", {"v" : 4}, 1,2,3,4,5,6,7,8,9,10,11,12]')) 2025-10-24T16:22:16.447375Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608748:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.447400Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.447466Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608754:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.447474Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824410699608753:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.447479Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.448229Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:16.451491Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824410699608757:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:16.531736Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824410699608808:2441] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] OUTPUT: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] INDEX:0/0/0 HEADER:0/0/0 >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,0,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,100,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,0,0] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,0,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,0,0.5] >> Backup::ChangelogLargeData [GOOD] >> Backup::ChangelogManyCommits >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0,BLOOM_FILTER] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,100,0] >> KqpOlapJson::FilterVariants[2,true,1,0,0,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1,0,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,10,0,0.5] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TVersions::Wreck0Reverse [GOOD] >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,0,0] >> KqpOlapJson::FilterVariantsCount[1,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,10,1000000,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,0,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,100,0] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,10,0,0.5] >> Backup::ChangelogManyCommits [GOOD] >> Backup::ChangelogSchema [GOOD] >> Backup::ChangelogSchemaAndData [GOOD] >> Backup::ChangelogSchemaNewColumn >> KqpOlapJson::FilterVariants[1,true,0,10,100,0] >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,0,0] >> KqpOlapJson::FilterVariants[10,false,0,1000,100,0] [GOOD] >> KqpOlapJson::FilterVariants[10,false,0,1000,100,0.5] >> KqpOlapJson::QuotedFilterVariants[2,true,1024,0,0,0.5] >> KqpAcl::RecursiveCreateTableShouldSuccess [GOOD] >> KqpConstraints::AddSerialColumnForbidden >> Backup::ChangelogSchemaNewColumn [GOOD] >> Bloom::Conf [GOOD] >> Bloom::Hashes [GOOD] >> Bloom::Rater >> KqpOlapJson::BrokenJsonWriting[2,true,0,10,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,10,1000000,0] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,100,0] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,100,0.5] >> Bloom::Rater [GOOD] >> Bloom::Dipping >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,100,0] >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,100,0.5] >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,1000,0.5] [GOOD] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-COLUMNSHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-COLUMNSHARD-CSV >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> KqpOlapJson::OrFilterVariants[2,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,0,0] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1,10,1000,0,CATEGORY_BLOOM_FILTER] >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,1000000,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,1000000,0] >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,100,0] [GOOD] |52.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |52.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |52.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,10,0,0.5] [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: 2025-10-24T16:21:56.854809Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1472: Bootstrap with 
config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:21:56.854884Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:828: Add page collection [1:0:256:0:0:0:1] 2025-10-24T16:21:56.854889Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:1] owner [1:5:2052] 2025-10-24T16:21:56.854903Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [1:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2025-10-24T16:21:56.854915Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #1 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #1 (done) Checking fetches#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] ... waiting for results #1 2025-10-24T16:21:56.855034Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 3 ] 2025-10-24T16:21:56.855044Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:1] owner [1:5:2052] class Online pages [ 1 2 3 ] cookie 1 2025-10-24T16:21:56.855050Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 366B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #1 (done) Checking results#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] 2025-10-24T16:21:56.862340Z node 2 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1472: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:21:56.862389Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:828: Add page collection [1:0:256:0:0:0:1] 2025-10-24T16:21:56.862394Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:1] owner [2:5:2052] 2025-10-24T16:21:56.862403Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [2:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2025-10-24T16:21:56.862414Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... 
blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2025-10-24T16:21:56.862432Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:828: Add page collection [1:0:256:0:0:0:2] 2025-10-24T16:21:56.862435Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:2] owner [2:5:2052] 2025-10-24T16:21:56.862440Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:2] owner [2:5:2052] cookie 2 class Online from cache [ ] already requested [ ] to request [ 4 5 ] 2025-10-24T16:21:56.862449Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 610B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2025-10-24T16:21:56.862462Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:1] owner [2:6:2053] 2025-10-24T16:21:56.862467Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [2:6:2053] cookie 3 class Online from cache [ ] already requested [ ] to request [ 5 6 ] 2025-10-24T16:21:56.862472Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 854B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:21:56.862480Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:2] owner [2:6:2053] 2025-10-24T16:21:56.862484Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:2] owner [2:6:2053] cookie 4 class Online from cache [ ] already requested [ ] to request [ 6 7 ] 2025-10-24T16:21:56.862490Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 1.07KiB EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #4 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #4 (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] ... 
waiting for results #4 2025-10-24T16:21:56.862544Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status ERROR pages [ 1 2 3 ] 2025-10-24T16:21:56.862548Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1071: Drop page collection [1:0:256:0:0:0:1] error ERROR 2025-10-24T16:21:56.862551Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1015: Send page collection error [1:0:256:0:0:0:1] owner [2:5:2052] class Online error ERROR cookie 1 2025-10-24T16:21:56.862554Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1015: Send page collection error [1:0:256:0:0:0:1] owner [2:6:2053] class Online error ERROR cookie 3 2025-10-24T16:21:56.862560Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 732B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] 2025-10-24T16:21:56.862585Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 5 6 ] 2025-10-24T16:21:56.862590Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 488B EvictedInMemoryBytes: 0B Checking results#4 Expected: Actual: ... waiting for results #4 2025-10-24T16:21:56.872743Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 6 7 ] 2025-10-24T16:21:56.872770Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:2] owner [2:6:2053] class Online pages [ 6 7 ] cookie 4 2025-10-24T16:21:56.872781Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B 2025-10-24T16:21:56.872790Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 4 5 ] 2025-10-24T16:21:56.872795Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:993: Send page collection result [1:0:256:0:0:0:2] owner [2:5:2052] class Online pages [ 4 5 ] cookie 2 2025-10-24T16:21:56.872801Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 488B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] 2025-10-24T16:21:56.883719Z node 3 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1472: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:21:56.883782Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:828: Add page collection [1:0:256:0:0:0:1] 2025-10-24T16:21:56.883788Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:1] owner [3:5:2052] 2025-10-24T16:21:56.883805Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:1] owner [3:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 4 5 ] 2025-10-24T16:21:56.883812Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:568: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 2 ] 2025-10-24T16:21:56.883825Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2025-10-24T16:21:56.883837Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:828: Add page collection [1:0:256:0:0:0:2] 2025-10-24T16:21:56.883841Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:280: Add page collection [1:0:256:0:0:0:2] owner [3:6:2053] 2025-10-24T16:21:56.883863Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:489: Request page collection [1:0:256:0:0:0:2] owner [3:6:2053] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 ] 2025-10-24T16:21:56.883867Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #2 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #2 (done) Checking fetches#2 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] ... waiting for fetches #2 2025-10-24T16:21:56.883917Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:728: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 ] 2025-10-24T16:21:56.883925Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:568: Request page collection [1:0:256:0:0:0:1] async queue pages [ 3 4 ] 2025-10-24T16:21:56.883931Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with ... 
GECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.003 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.573906Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.014 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.014 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.014 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} 00000.014 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.014 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.014 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.014 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.014 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.015 II| FAKE_ENV: All BS storage groups are stopped 00000.015 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.015 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.589884Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.004 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} 00000.004 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.004 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1826b, 23} 00000.004 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.004 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.004 II| FAKE_ENV: All BS storage groups are stopped 00000.004 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.004 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 
2025-10-24T16:21:57.594600Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.004 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.005 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} 00000.005 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.005 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.005 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.005 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: All BS storage groups are stopped 00000.005 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.005 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.600329Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1770b, 27} 00000.003 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.604314Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.001 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.001 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.001 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.001 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.001 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.002 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.002 II| FAKE_ENV: All BS storage groups are stopped 00000.002 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.002 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.607070Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 
00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.014 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.014 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.014 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} 00000.014 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.014 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.014 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.014 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: All BS storage groups are stopped 00000.014 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.014 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.623091Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.003 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.003 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.003 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.003 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {753b, 11} 00000.003 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.003 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.003 II| FAKE_ENV: All BS storage groups are stopped 00000.003 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.003 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.627396Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR cookie 0 00000.014 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR ... waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult ... 
waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult (done) 00000.025 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 5 reqs hit {8 205278b} miss {0 0b} 00000.025 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.025 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1351b, 17} 00000.025 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {105547b, 14} 00000.025 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {102560b, 2} 00000.025 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.025 II| FAKE_ENV: All BS storage groups are stopped 00000.025 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.025 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:57.653948Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.215 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled 00000.226 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.226 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.226 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.226 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {501b, 9} 00000.226 II| FAKE_ENV: DS.1 gone, left {425b, 4}, put {460b, 5} 00000.226 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.226 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.226 II| FAKE_ENV: All BS storage groups are stopped 00000.226 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 17.71s 00000.226 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,100,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,100,0] >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> KqpConstraints::AddSerialColumnForbidden [GOOD] >> KqpConstraints::AddColumnWithDefaultForbidden >> KqpOlapJson::FilterVariantsCount[1,false,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,0,0] >> KqpOlapJson::FilterVariants[2,true,1,0,0,0.5] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1,0,100,0] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,0,0.5] >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,100,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,1000000,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,0,0] >> KqpOlapJson::BrokenJsonWriting[2,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,10,1000000,0.5] >> KqpOlapJson::CompactionVariants[10,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> 
KqpOlapJson::BloomNGrammIndexesVariants[true,true,1,10,1000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 63414, MsgBus: 13152 2025-10-24T16:21:57.957720Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824327845176033:2143];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:57.957808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001330/r3tmp/tmphbJLsA/pdisk_1.dat 2025-10-24T16:21:58.001614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:58.001905Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:58.002121Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824327845175927:2081] 1761322917956986 != 1761322917956989 TServer::EnableGrpc on GrpcPort 63414, node 1 2025-10-24T16:21:58.009988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:58.010000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:58.010002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:58.010040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13152 TClient is connected to server localhost:13152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:21:58.065067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:58.065096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:58.066122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:21:58.067288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:58.069573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:21:58.290498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824332140143881:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.290527Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.290628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824332140143891:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.290639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:21:58.295086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:21:58.330662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:21:58.345223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:58.345287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:58.345338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:58.345368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:58.345391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:58.345415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:58.345443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:58.345468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:58.345503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:58.345531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 
2025-10-24T16:21:58.345554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:58.345577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:58.345599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824332140143974:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:58.346206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824332140143989:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:58.346231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:58.346240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:58.346249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824332140143989:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:58.346255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:58.346261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:58.346283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:58.346289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824332140143989:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:58.346296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:58.346308Z node 1 :TX_COLUMNSHARD WARN: lo ... 
SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 5 Indexes { Id: 3 Name: "index_ngramm_b" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "\"b.c.d\"" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: false } } Indexes { Id: 4 Name: "index_ngramm_a" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "a" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: true } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:17.733118Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824411018552207:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:17.740030Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:17.740053Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:17.740058Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=2; 2025-10-24T16:22:17.740062Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=3;to=4; 2025-10-24T16:22:17.740066Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=4;to=5; 2025-10-24T16:22:17.740263Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322936793;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:17.740276Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322936842;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:17.740280Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322936877;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:17.740284Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322937136;tx_id=281474976710667;;is_diff=1;version=4; 2025-10-24T16:22:17.740392Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322937157;tx_id=281474976710668;;new_schema=Id: 0 SinceStep: 1761322937157 SinceTxId: 281474976710668 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 5 Indexes { Id: 3 Name: "index_ngramm_b" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "\"b.c.d\"" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: false } } Indexes { Id: 4 Name: "index_ngramm_a" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "a" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: true } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:17.740999Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:17.741147Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;task_id=9be34d72-b0f511f0-973f3496-a5f11712;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:17.744242Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;task_id=9be47fb2-b0f511f0-af820aec-b17aab67;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a1" ORDER BY Col1; COMPARE: [[1u;["{\"a.b.c\":\"a1\"}"]]] OUTPUT: [[1u;["{\"a.b.c\":\"a1\"}"]]] INDEX:5/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") = "1b4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "%1b4%" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "%1b4%" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "%1B4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "1b5" ORDER BY Col1; 2025-10-24T16:22:18.731629Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824411018552207:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:18.732325Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824411018552207:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "1b5" ORDER BY Col1; 2025-10-24T16:22:18.744034Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:18.744256Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824411018552204:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a4" ORDER BY Col1; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, 
"$.\"b.c.d111\"") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:5/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "1b3" ORDER BY Col1; COMPARE: [[13u;["{\"b.c.d\":\"1b3\"}"]]] OUTPUT: [[13u;["{\"b.c.d\":\"1b3\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "1B3" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/4/1 HEADER:0/0/0 >> KqpOlapJson::CompactionVariants[10,false,1,10,0,0] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,100,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,1000000,0.5] >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,0,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,100,0] >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,1000000,0.5] >> KqpOlapJson::FilterVariants[10,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::FilterVariants[10,false,0,1000,1000000,0] >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[1,false,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 7979, MsgBus: 21531 2025-10-24T16:22:10.521916Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824382118679038:2250];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:10.521934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:10.550141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00126a/r3tmp/tmpQ1xndy/pdisk_1.dat 2025-10-24T16:22:10.834161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:10.834188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:10.835181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:10.846133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:10.847145Z 
node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824382118678817:2081] 1761322930488951 != 1761322930488954 2025-10-24T16:22:10.850868Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7979, node 1 2025-10-24T16:22:10.860341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:10.860352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:10.860355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:10.860391Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21531 TClient is connected to server localhost:21531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:11.008664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:11.014135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:11.042660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:11.531576Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:11.901717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824386413646800:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.902729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.914347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824386413646810:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.914376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.269217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:12.367072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.367172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.367394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.367431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.367452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.367481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:12.367508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:12.367529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:12.367548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:12.367569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:12.367588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:12.367614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:12.367640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390708614162:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:12.372181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:12.372195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:12.372208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:12.372212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:12.372230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:12.372234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:12.372247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:12.372252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalize ... 
pp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:19.165649Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:19.165664Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:19.165669Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:19.165684Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:19.165690Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:19.165701Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:19.165705Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:19.166603Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824421836270876:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=54244287072352;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322939166;max=18446744073709551615;plan=0;src=[6:7564824417541303207:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:19.169133Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:19.169150Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:19.169153Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:19.183394Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:19.183912Z node 6 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836270949:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.184084Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.185782Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836270961:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.185828Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.187238Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:19.206301Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836270981:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.206325Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.206367Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836270986:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.206372Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.206767Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:19.217653Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:19.227396Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836271014:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.227420Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.227532Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836271020:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.227532Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824421836271019:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.227539Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.228265Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:19.231120Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:19.231173Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824421836271023:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:19.332443Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824421836271074:2437] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,100,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,1000000,0] >> KqpOlap::EmptyColumnsRead >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,1000000,0] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,0,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,0,0.5] >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,100,0.5] >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreJsonArrayVariants[1,true,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 25849, MsgBus: 5061 2025-10-24T16:22:10.594657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:10.594725Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824385730754761:2187];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:10.594881Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00131b/r3tmp/tmpWjS0by/pdisk_1.dat 2025-10-24T16:22:10.861934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:10.861973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:10.862281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:10.866039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:10.873778Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25849, node 1 2025-10-24T16:22:10.916612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:10.916623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:10.916624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:10.916659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5061 TClient is connected to server localhost:5061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:11.136724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:11.162197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:11.195951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:11.596056Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:12.614107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824394320689882:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.614190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.615963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824394320689891:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.615990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.874874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:12.929804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.930033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.930249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.930271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.930421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.930577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:12.930599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:12.930619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:12.930639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:12.930660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:12.930843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:12.930865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:12.930886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824394320689944:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:12.940651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:12.940667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:12.940679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:12.940684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:12.940702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:12.940706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:12.940716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:12.940720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:12.940726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME ... 
.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:19.419651Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:19.419656Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:19.419661Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:19.419667Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:19.419680Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:19.419685Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:19.419694Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:19.419698Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:19.421591Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824424260225606:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=20449336747232;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322939421;max=18446744073709551615;plan=0;src=[6:7564824419965257952:2152];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:19.437313Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:19.437336Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:19.437339Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:19.451245Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:19.451684Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225678:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.451807Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.451963Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225690:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.451970Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.457439Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:19.469724Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:19.470101Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225710:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.470176Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.470299Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225722:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.470305Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.472612Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('["a", {"v" : 4}, 1,2,3,4,5,6,7,8,9,10,11,12]')) 2025-10-24T16:22:19.479781Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225743:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.479797Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.479936Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225748:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.479944Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824424260225749:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.479956Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.480608Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:19.482839Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:19.482894Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824424260225752:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:19.556562Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824424260225803:2436] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] OUTPUT: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:19.896013Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,100,0] >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,0,0.5] >> KqpOlapJson::OrFilterVariants[1,false,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::BrokenJsonWriting[2,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,0,0] >> KqpConstraints::AddColumnWithDefaultForbidden [GOOD] >> KqpConstraints::AddNonColumnDoesnotReturnInternalError >> KqpOlapJson::FilterVariants[2,true,1,0,100,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1,0,100,0.5] >> KqpOlapAggregations::Aggregation_Some_GroupBy [GOOD] >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,100,0] >> KqpOlapJson::FilterVariants[1,true,0,10,100,0] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,10,1000000,0] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,1000,0,0.5] >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,100,0.5] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,1000000,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,10,0,0.5] >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> KqpBoolColumnShard::TestGroupByBool-SCAN_QUERY-COLUMNSHARD-CSV [GOOD] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE >> KqpOlapJson::OrFilterVariants[2,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,100,0] >> KqpOlapJson::QuotedFilterVariants[2,true,1024,0,0,0.5] [GOOD] >> 
KqpOlapJson::QuotedFilterVariants[2,true,1,1000,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,1000000,0.5] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,1000000,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 61761, MsgBus: 11741 2025-10-24T16:22:09.190701Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824379538497401:2254];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:09.190728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b4c/r3tmp/tmpfVtVr2/pdisk_1.dat 2025-10-24T16:22:09.504324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:09.505844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:09.506036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:09.512306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:09.543927Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:09.547238Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824379538497175:2081] 1761322929153314 != 1761322929153317 TServer::EnableGrpc on GrpcPort 61761, node 1 2025-10-24T16:22:09.589804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:09.589817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:09.589819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:09.589868Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11741 TClient is connected to server localhost:11741 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:09.717359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:09.727112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:09.783468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:10.183968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:11.028328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824388128432459:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.028392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.028636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824388128432469:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.028644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.203924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:11.296218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:11.296389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:11.296692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:11.297178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:11.297196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:11.297293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:11.297335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:11.297415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:11.297431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:11.297445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:11.297463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:11.297547Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:11.297548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:11.297571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824388128432587:2325];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:11.297696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:11.297714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:11.297730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:11.297880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:11.297899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:11.298183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824388128432589:2327];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:11.298354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186 ... 
Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.382162Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.382294Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.382445Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.382599Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } 
ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.382733Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.382848Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.383064Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:20.383185Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), 
(4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:22:20.403659Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824428170693802:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.403688Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.403895Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824428170693807:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.403907Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824428170693808:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.403913Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.404844Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:20.407914Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:20.407993Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824428170693811:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:20.486971Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824428170693862:2683] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; 2025-10-24T16:22:20.635013Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:6/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,100,0.5] >> KqpOlapJson::CompactionVariants[10,false,1,10,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1,10,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,1000000,0.5] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,100,0] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,1000000,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,100,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::OrFilterVariants[1,false,1024,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 22646, MsgBus: 5966 2025-10-24T16:22:09.670568Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824381714535479:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:09.670975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f54/r3tmp/tmprMj7Ud/pdisk_1.dat 2025-10-24T16:22:10.031325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:10.031349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:10.033685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:10.067185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:10.087151Z node 1 
:IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:10.089529Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824381714535430:2081] 1761322929666037 != 1761322929666040 TServer::EnableGrpc on GrpcPort 22646, node 1 2025-10-24T16:22:10.212017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:10.212028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:10.212030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:10.212255Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:10.360978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5966 TClient is connected to server localhost:5966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:10.596933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:10.620611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:10.671930Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:11.732936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824390304470716:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.733014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.733164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824390304470726:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.733170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.959457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:12.047045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.047150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.047209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.047226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.047244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.047267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:12.047287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:12.047305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:12.047321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:12.047336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:12.047352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:12.047367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:12.047790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824390304470780:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:12.064686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:12.064702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:12.064890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:12.064895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:12.064912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:12.064916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:12.064926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:12.065130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:12.065137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 20 ... 
N: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:20.882663Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:20.882669Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:20.882675Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:20.882681Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:20.882694Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:20.882698Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:20.882707Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:20.882711Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:20.887985Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824427038900113:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=55728236487008;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322940887;max=18446744073709551615;plan=0;src=[7:7564824427038899738:2147];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:20.893573Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:20.893590Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:20.893593Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:20.899528Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] 
ActorId: [7:7564824427038900187:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.899540Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.899629Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824427038900189:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.899633Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.901454Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:20.907123Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:20.911983Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824427038900219:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.912000Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.912083Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824427038900222:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.912088Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.913216Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:20.919422Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:20.930636Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824427038900252:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.930654Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.930784Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824427038900257:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.930790Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824427038900258:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.930828Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.931505Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:20.933810Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824427038900261:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:20.999610Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824427038900312:2440] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE (JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3") OR (JSON_VALUE(Col2, "$.b") = "b1" AND JSON_VALUE(Col2, "$.c") = "c1") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:4/0/0 HEADER:0/0/0 2025-10-24T16:22:21.291633Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::FilterVariants[10,false,0,1000,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[10,false,0,1000,1000000,0.5] >> KqpOlapJson::FilterVariants[2,true,1,0,100,0.5] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1,0,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,100,0] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,100,0.5] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0,BLOOM_FILTER] >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,1000,0,0.5] [GOOD] >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,10,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,100,0.5] >> KqpOlapJson::DoubleFilterVariants[1,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,false,1024,1000,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,1000000,0] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,0,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,100,0] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,100,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,100,0.5] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-COLUMNSHARD-CSV >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,1000000,0] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,100,0.5] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFirstLevelVariants[10,true,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1581, MsgBus: 5640 2025-10-24T16:22:13.084803Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824398455681150:2255];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:13.084820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:13.125565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001314/r3tmp/tmpL7H1Sq/pdisk_1.dat 2025-10-24T16:22:13.388946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:13.388969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:13.390655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:13.436860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:13.452210Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1581, node 1 2025-10-24T16:22:13.582323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:13.582337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:13.582338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:13.582378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5640 TClient is connected to server localhost:5640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:22:14.057885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:14.074091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:14.084861Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:14.988079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824402750648914:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.988122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.988254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824402750648924:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.988260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.113699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:15.212685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.212886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.213090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.213107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.213127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.213149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.213170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.213340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.213519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407045616336:2328];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:15.217605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.217621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-10-24T16:22:15.223883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.223922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.223942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.223964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.223988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.224021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.224041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:15.224066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:15.224085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:15.224103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824407045616334:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:15.224125Z node 1 :TX_COLUMNSHARD WA ... 
owedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611337Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611393Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611483Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611613Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611622Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611761Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611761Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:22.611915Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:22.621562Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[6:7564824435330449290:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.621583Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.621691Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824435330449295:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.621697Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824435330449296:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.621746Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.622544Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:22.624757Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:22.624874Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824435330449299:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:22.680484Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824435330449350:2688] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:23.016235Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFirstLevelVariants[1,false,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 9199, MsgBus: 61174 2025-10-24T16:22:13.393300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824398986349426:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:13.403779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001310/r3tmp/tmpVlv7gP/pdisk_1.dat 2025-10-24T16:22:13.689345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:13.689366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:13.693741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:13.721415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:13.740760Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:13.741648Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824398986349380:2081] 1761322933387244 != 1761322933387247 TServer::EnableGrpc on GrpcPort 9199, node 1 2025-10-24T16:22:13.888330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:13.888344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:13.888346Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:13.888397Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:13.936437Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61174 TClient is connected to server localhost:61174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:14.330308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:14.348145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:14.385890Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:15.740387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824407576284668:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.740420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.740538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824407576284678:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.740542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.808720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:15.829726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.829810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.829867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.829887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.829907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.829930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.829958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.829977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.830008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:15.830027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:15.830048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:15.830068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:15.830090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407576284730:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:15.835274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:15.835298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:15.835314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:15.835319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:15.835338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:15.835343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:15.835353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:15.835358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:15.835365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2 ... 
tion=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:22.864514Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:22.864520Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:22.864526Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:22.864541Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:22.864547Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:22.864557Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:22.864562Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:22.871747Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824437582548588:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=125868226279376;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322942871;max=18446744073709551615;plan=0;src=[6:7564824437582548196:2147];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:22.873037Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:22.873049Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:22.873051Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:22.881297Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:22.881682Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548661:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.881790Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.887873Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548673:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.887929Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.889645Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`true`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:22.901367Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:22.904041Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548693:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.904163Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.904319Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548705:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.904324Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.924275Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:22.934983Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548726:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.935002Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.935111Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548731:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.935133Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824437582548732:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.935141Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.935812Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:22.938249Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:22.938303Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824437582548735:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:22.999220Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824437582548786:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::FilterVariants[1,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,10,1000000,0.5] >> KqpOlapAggregations::Aggregation_Some_GroupByNullMix [GOOD] >> KqpOlapAggregations::Aggregation_Some_GroupByNull >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,1000000,0.5] [GOOD] >> KqpOlap::EmptyColumnsRead [GOOD] >> KqpOlap::EmptyRange >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,1000000,0.5] [GOOD] >> SystemView::PartitionStatsLocksFields [GOOD] >> SystemView::PartitionStatsFields >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0.5,BLOOM_FILTER] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,0,0.5] >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,100,0] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,100,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFirstLevelVariants[2,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12224, MsgBus: 13945 2025-10-24T16:22:14.552344Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824402180228221:2249];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:14.552361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00130f/r3tmp/tmpnWguNW/pdisk_1.dat 2025-10-24T16:22:14.819050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:14.819076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-10-24T16:22:14.824178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:14.824911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:14.827983Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:14.828396Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824402180228010:2081] 1761322934541078 != 1761322934541081 TServer::EnableGrpc on GrpcPort 12224, node 1 2025-10-24T16:22:14.907124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:14.907136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:14.907137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:14.907170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13945 2025-10-24T16:22:14.986252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:15.128756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:15.556089Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:16.106103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824410770163285:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.106196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.107972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824410770163295:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.107985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.108082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824410770163297:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.108089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.374398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:16.438299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:16.438559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:16.438621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:16.438639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:16.438799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:16.438818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:16.438975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:16.439140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:16.439312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:16.439329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:16.439345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:16.439364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:16.439380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824410770163373:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:16.442980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824410770163358:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:16.443169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824410770163358:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:16.443246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824410770163358:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:16.443266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824410770163358:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:16.443284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824410770163358:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:16.443302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824410770163358:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;desc ... 
troller::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322943404;max=18446744073709551615;plan=0;src=[6:7564824435990217260:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.409294Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.409313Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.409316Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.410702Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.410712Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.410714Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:23.419238Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185048:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.419257Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.419809Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:23.424404Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185052:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.424491Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.429788Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:23.429924Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`true`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:23.443550Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:23.443934Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185086:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.443989Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.444159Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185096:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.444170Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.456946Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:23.457134Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:23.468620Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185121:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.468642Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.471893Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185126:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.471914Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824440285185127:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.471981Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.474895Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:23.477679Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824440285185130:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:23.576352Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824440285185181:2470] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":\"{\\\"c\\\":1,\\\"e\\\":{\\\"c.a\\\":2}}\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":\"[\\\"a\\\",{\\\"v\\\":[\\\"c\\\",5]}]\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,0,0] >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24248, MsgBus: 8691 2025-10-24T16:22:11.575240Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824386799906167:2259];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:11.576021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:11.579997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001316/r3tmp/tmp58zNOT/pdisk_1.dat 2025-10-24T16:22:11.800396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:11.812159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:11.812178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:11.816264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:11.852905Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24248, node 1 2025-10-24T16:22:11.860163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824386799905914:2081] 1761322931557059 != 1761322931557062 2025-10-24T16:22:11.892018Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:11.892028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:11.892030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:11.892062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:11.988023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8691 TClient is connected to server localhost:8691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:12.480230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:12.492074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:12.576072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:13.624377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824395389841196:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.624406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.627290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824395389841205:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:13.627316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.222000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:14.334490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:14.335268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:14.335543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:14.335561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:14.335720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:14.336001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:14.336029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:14.336047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:14.336064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:14.336206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:14.336223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:14.336239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:14.336706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824399684808558:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:14.343567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:14.343585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:14.343600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:14.343605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:14.343624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:14.343629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:14.343640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:14.343644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCounter ... 
rocess=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:23.404577Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:23.404582Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:23.404590Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:23.404596Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:23.404611Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:23.404616Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:23.404626Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:23.404630Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:23.405595Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824440697851092:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=54447206656768;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322943405;max=18446744073709551615;plan=0;src=[7:7564824436402883425:2145];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.407963Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.407973Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.407976Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.416354Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851165:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.416369Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.416467Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851167:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.416472Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.418668Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:23.425295Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:23.436039Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851197:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.436055Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.436200Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851199:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.436208Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.438234Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:23.446071Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:22:23.462469Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851230:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.462489Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.462631Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851235:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.462638Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824440697851236:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.462651Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.463435Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:23.465604Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:23.465683Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824440697851239:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:23.520721Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824440697851290:2436] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; 2025-10-24T16:22:23.727579Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 >> KqpOlapJson::CompactionVariants[10,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1,10,100,0] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,1000000,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,0,0,0] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-COLUMNSHARD-CSV [GOOD] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-ARROW >> KqpOlapJson::FilterVariants[10,false,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,0,100,0] >> KqpOlapJson::DoubleFilterVariants[1,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,false,1024,1000,1000000,0.5] >> KqpOlap::ScanFailedSnapshotTooOld [GOOD] >> KqpOlap::ReverseMerge >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,1000000,0.5] >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[10,false,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 15296, MsgBus: 27498 2025-10-24T16:22:13.067334Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824398933604227:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:13.069746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:13.076931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001312/r3tmp/tmpUyUKjb/pdisk_1.dat 2025-10-24T16:22:13.386668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:13.386699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-10-24T16:22:13.394555Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:13.395537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15296, node 1 2025-10-24T16:22:13.455273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:13.564126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:13.564137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:13.564139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:13.564180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27498 TClient is connected to server localhost:27498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:14.070285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:14.118545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:14.139543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:15.186579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824407523539466:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.186605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.186926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824407523539476:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.186935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.317761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:15.390476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.390551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.390600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.390625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.390647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.390676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.390698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.390724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.390758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:15.390780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:15.390803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:15.390825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:15.390849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407523539603:2333];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:15.396403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.396428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.396464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.396490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.396509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.396529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.396556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.396576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824407523539602:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.396594Z node 1 :TX ... 
SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.108168Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.108317Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.108460Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.108615Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 
2025-10-24T16:22:24.108757Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.108902Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.109046Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.109313Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:24.136018Z node 6 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824446147463480:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.136068Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.143920Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824446147463485:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.143951Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824446147463486:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.143962Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.148623Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:24.151128Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:24.151175Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824446147463489:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:24.244742Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824446147463540:2690] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24154, MsgBus: 16426 2025-10-24T16:22:11.705942Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824388536469724:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:11.705974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001318/r3tmp/tmpV7hnuJ/pdisk_1.dat 2025-10-24T16:22:11.871921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:11.874089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:11.874116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:11.874787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24154, node 1 2025-10-24T16:22:11.947159Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:12.068114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:12.123014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:12.123024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:12.123025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:22:12.123067Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16426 TClient is connected to server localhost:16426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:12.556116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:12.588044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:12.706673Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:14.597392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824401421372262:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.598640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.600587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824401421372272:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.600601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:14.840058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:14.925526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:14.926766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:14.927212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:14.927359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:14.927375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:14.927676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:14.927693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:14.927709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:14.927724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:14.927974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:14.928132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:14.928148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:14.928285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824401421372350:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:14.928818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:14.928825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:14.928847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:14.928861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:14.928875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:14.928888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:14.928904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:14.928920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824401421372338:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10 ... 
8;this=57058513409520;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322943846;max=18446744073709551615;plan=0;src=[7:7564824441035983815:2156];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.852052Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.852077Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.852080Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.852100Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.852109Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:23.852111Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:23.895059Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:23.895932Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984284:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.896212Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.897919Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984296:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.897994Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.899424Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:23.899573Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:23.917223Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:23.917673Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984320:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.917806Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.917933Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984332:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.917938Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.930244Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:23.930454Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:22:23.940915Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984357:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.940935Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.941080Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984362:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.941089Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824441035984363:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.941147Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.941980Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:23.944833Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:23.944896Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824441035984366:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:24.025576Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824445330951713:2471] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; 2025-10-24T16:22:24.083264Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:4/0/0 HEADER:0/0/0 >> KqpOlapJson::FilterVariants[2,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::FilterVariants[2,true,1,0,1000000,0.5] >> KqpOlap::EmptyRange [GOOD] >> KqpOlap::DisablePushdownAggregate >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,1000000,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,1000000,0.5] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1,10,1000,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1,10,1000,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::FilterVariants[1,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,1000,0,0] >> KqpOlapJson::CompactionVariants[2,false,1024,10,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,1000000,0.5] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,1000000,0] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-YDB_VALUE ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariants[10,false,0,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 10023, MsgBus: 10843 2025-10-24T16:22:10.458629Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824384049872367:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:10.458643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:10.485538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/00131a/r3tmp/tmpC2ynKf/pdisk_1.dat 2025-10-24T16:22:10.609683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:10.609710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:10.635009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:10.635313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:10.686127Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10023, node 1 2025-10-24T16:22:10.800308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:10.800318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:10.800320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:10.800358Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10843 2025-10-24T16:22:10.883910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:10.951242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
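For readability, the schema setup this test performs is restated below as plain YQL. The CREATE TABLE text is copied from the EXECUTE: entry that follows; the two ALTER OBJECT statements are reconstructed from the EXECUTE: entries elsewhere in this section and from the UpsertColumns/Options schema diffs logged for tx 281474976710659 and 281474976710660 further down, so the exact spelling here is an inferred sketch rather than the test's literal source.

    -- column-store table with a JsonDocument column (from the EXECUTE: entry below)
    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        PRIMARY KEY (Col1)
    )
    PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10);

    -- switch the scan reader policy (same statement shape as logged for the node 7 run above)
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`);

    -- enable the SUB_COLUMNS data accessor on Col2; the option values match the logged
    -- schema diff for this case: ColumnsLimit 0, ChunkMemoryLimit 1000000, OthersAllowedFraction 0.5
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=ALTER_COLUMN, NAME=Col2,
             `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
             `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`,
             `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`,
             `OTHERS_ALLOWED_FRACTION`=`0.5`);
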
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:11.470812Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:12.199512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824392639807508:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.199607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.206983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824392639807518:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.207006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.207146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824392639807520:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.207153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.507356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:12.607516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.607619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.607668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.607686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.607701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.607726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:12.607744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:12.607761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:12.607777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:12.607796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:12.607817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:12.607833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:12.607872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824392639807651:2328];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:12.608994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824392639807652:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.609010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824392639807652:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.609048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824392639807652:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.609067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824392639807652:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.609084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824392639807652:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.609099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824392639807652:2329];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11 ... 
s { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410129Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410234Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410334Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410434Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410534Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410569Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:24.410631Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:24.432049Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824445870226305:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.432080Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.432213Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824445870226310:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.432219Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824445870226311:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.432224Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.433066Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:24.436062Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:24.436123Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824445870226314:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:24.492678Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824445870226365:2689] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a\":\"a2\"}"]]] INDEX:4/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".c") = "1" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:4/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".e.\"c.a\"") = "2" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:4/0/0 HEADER:0/0/0 >> KqpOlapJson::DoubleFilterVariants[10,false,1,0,100,0] >> KqpOlapJson::CompactionVariants[1,true,1,10,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[1,false,0,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 26848, MsgBus: 28653 2025-10-24T16:22:13.393256Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824397624095513:2081];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:13.401580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:13.437739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001315/r3tmp/tmpaE7gTo/pdisk_1.dat 2025-10-24T16:22:13.880708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:13.880732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-10-24T16:22:13.882078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:13.945478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:13.981926Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26848, node 1 2025-10-24T16:22:14.116201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:14.116212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:14.116214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:14.116252Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:14.156510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28653 2025-10-24T16:22:14.404766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:14.548621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
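The data-load and verification steps of this FilterVariantsCount block appear further down as single wrapped EXECUTE:/COMPARE:/OUTPUT: entries; restated as plain YQL they amount to the following. The statement text is taken from those entries; only the line breaks and comments are added here.

    -- four rows, only one of which has $.a = "a2" (from the EXECUTE: REPLACE INTO entry below)
    REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
        (1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')),
        (2u, JsonDocument('{"a" : "a2"}')),
        (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')),
        (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}'));

    -- the test then counts the matching rows; this run reports
    -- COMPARE: [[1u]] OUTPUT: [[1u]], i.e. expected and actual results agree
    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT COUNT(*) FROM `/Root/ColumnTable`
    WHERE JSON_VALUE(Col2, "$.a") = "a2";
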
2025-10-24T16:22:14.561825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:15.756119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824406214030745:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.756166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.756387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824406214030755:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.756395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.857640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:15.973429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.974411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.974680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.974700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.975076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.975264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.975282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.975302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.975322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:15.975338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:15.975532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:15.975726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:15.976033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824406214030809:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:15.990189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:15.990208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:15.990222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:15.990371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:15.990551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:15.990555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:15.990564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:15.990569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:15.990575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_N ... 
escription=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:25.018500Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:25.018506Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:25.018511Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:25.018517Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:25.018523Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:25.018537Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:25.018542Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:25.018554Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:25.018558Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:25.024896Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824448729971636:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=123531753248352;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322945024;max=18446744073709551615;plan=0;src=[7:7564824444435003955:2143];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.030119Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.030139Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.030142Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:25.045154Z 
node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971708:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.045175Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.045211Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:25.045618Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971716:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.045637Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.049502Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:25.058475Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971740:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.058497Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.058610Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971743:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.058616Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.060282Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:25.065562Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:25.077617Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971773:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.077636Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.077764Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971778:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.077771Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824448729971779:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.077785Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.078506Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:25.080974Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:25.081031Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824448729971782:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:25.148761Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824448729971833:2442] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:1/0/0 HEADER:0/0/0 2025-10-24T16:22:25.469700Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[2,true,0,1000,100,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 22938, MsgBus: 25845 2025-10-24T16:22:17.728444Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824412798574687:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:17.728464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:17.735513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001308/r3tmp/tmpbVj2vE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22938, node 1 2025-10-24T16:22:17.801397Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:17.816055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:17.816069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:17.816071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:17.816116Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:17.829851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:17.829881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:17.830982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25845 2025-10-24T16:22:17.914637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
TClient is connected to server localhost:25845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:18.024528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:18.030632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:18.498867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824417093542606:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.498901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.499098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824417093542615:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.499105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.537449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:18.555515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:18.555669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:18.555714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:18.555735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:18.555755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:18.555775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:18.555795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:18.555816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:18.555840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:18.555879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:18.555900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:18.555921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:18.555944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824417093542679:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:18.561525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:18.561544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:18.561558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:18.561565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:18.561592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:18.561598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:18.561610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:18.561616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:18.561623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:18.561629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalize ... 
tract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:25.687017Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:25.687027Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:25.687032Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:25.688093Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824447433330091:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=53661244565776;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322945688;max=18446744073709551615;plan=0;src=[7:7564824447433329678:2142];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.688410Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824447433330107:2325];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710658;this=53661244514912;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322945688;max=18446744073709551615;plan=0;src=[7:7564824447433329678:2142];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.690077Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.690092Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.690094Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.691830Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.691838Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:25.691840Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:25.706544Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:25.708659Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824447433330194:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.708864Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.709494Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824447433330206:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.709509Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.712352Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:25.712520Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:25.723585Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:25.725278Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824447433330230:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.725406Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.725825Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824447433330242:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.725837Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.727929Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:25.728195Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:22:25.733086Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037889;parent_id=[7:7564824447433330107:2325];path_id=1000000890;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:25.733098Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037889;parent_id=[7:7564824447433330107:2325];path_id=1000000890;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:25.733101Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037889;parent_id=[7:7564824447433330107:2325];path_id=1000000890;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:25.733124Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824447433330107:2325];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037889;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=a0a80a64-b0f511f0-80aca588-cfadc4d3;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:22:25.733152Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564824447433330107:2325];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037889;event=TEvWritePortionResult;tablet_id=72075186224037889;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 72075186224037889 in longTx ydb://long-tx/01k8bgek61bvgnmneywhebnydf?node_id=7 >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> KqpOlapJson::OrFilterVariants[2,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,1000000,0.5] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,0,0] >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> KqpOlapJson::DoubleFilterVariants[1,false,1024,1000,1000000,0.5] [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,1000,0,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,1000,0,0.5] >> KqpOlapJson::SimpleExistsVariants[10,false,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,false,1,0,0,0.5] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,1000000,0.5] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,100,0] >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,100,0] >> KqpOlapJson::FilterVariants[1,true,0,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,1000,0,0.5] >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> KqpOlap::ReverseMerge [GOOD] >> KqpOlap::RandomJsonCharacters >> KqpOlapJson::CompactionVariants[10,true,1,1000,0,0] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::FilterVariants[2,true,1,0,1000000,0.5] [GOOD] |52.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |52.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |52.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,1000000,0] >> KqpOlap::DisablePushdownAggregate [GOOD] >> KqpOlap::DuplicatesInIncomingBatch >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex 
>> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-CSV >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,0,0,CATEGORY_BLOOM_FILTER] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Some_GroupByNull [GOOD] Test command err: Trying to start YDB, gRPC: 24066, MsgBus: 5051 2025-10-24T16:21:51.273032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824304027958855:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:21:51.273062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00134b/r3tmp/tmp4JkN7B/pdisk_1.dat 2025-10-24T16:21:51.319227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:21:51.323829Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:21:51.323963Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824304027958831:2081] 1761322911272909 != 1761322911272912 TServer::EnableGrpc on GrpcPort 24066, node 1 2025-10-24T16:21:51.335122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:21:51.335136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:21:51.335138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:21:51.335177Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5051 2025-10-24T16:21:51.377046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:21:51.377071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:21:51.378031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:21:51.409268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:21:51.439401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-10-24T16:21:51.452604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:21:51.452674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:21:51.452717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:21:51.452745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:21:51.452768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:21:51.452790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:21:51.452814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:21:51.452835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:21:51.452872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:21:51.452893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:21:51.452911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:21:51.452929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:21:51.452947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824304027959512:2301];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:21:51.453414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:21:51.453428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:21:51.453441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:21:51.453450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:21:51.453469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:21:51.453477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:21:51.453490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:21:51.453498Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:21:51.453506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:21:51.453512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:21:51.453518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:21:51.453527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:21:51.453550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:21:51.453559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:21:51.453570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:21:51.453580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:21:51.453587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:21:51.453595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.c ... 
state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.962984Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.962995Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.962997Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.964111Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.964119Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.964121Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.965142Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.965152Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:24.965154Z node 11 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1448;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=1448;columns=6; REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; 2025-10-24T16:22:25.117588Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7564824447536277418:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.117607Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.117784Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7564824447536277453:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.117792Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7564824447536277455:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.117802Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.118604Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:25.121530Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-10-24T16:22:25.121588Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7564824447536277458:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:22:25.206060Z node 11 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [11:7564824447536277509:2464] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:26.511029Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:26.596081Z node 11 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322945172, txId: 18446744073709551615] shutting down REQUEST: --!syntax_v1 PRAGMA Kikimr.OptUseFinalizeByKey; SELECT level, SOME(id), SOME(level) FROM `/Root/tableWithNulls` WHERE id = 6 GROUP BY level ORDER BY level; JSON Plan: {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["tableWithNulls"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.level","Aggregation":"{state._yql_agg_0,state._yql_agg_1}","Name":"Aggregate","Phase":"Intermediate"},{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tableWithNulls","E-Rows":"0","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id [6, 6]","level"],"SsaProgram":{"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3}]}}]},"E-Cost":"0","ReadRangesExpectedSize":"1"}],"Node Type":"Aggregate-TableRangeScan"}],"HashFunc":"HashV2","Node Type":"HashShuffle","KeyColumns":["level"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.level","Name":"Sort"},{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Sort-Aggregate"}],"Node Type":"Merge","SortColumns":["level (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tableWithNulls","reads":[{"columns":["id","level"],"scan_by":["id [6, 6]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"tableWithNulls","ReadRangesKeys":["id"],"ReadColumns":["id [6, 6]","level"],"SsaProgram":{"Command":[{"Projection":{"Columns":[{"Id":1},{"Id":3}]}}]},"E-Cost":"0","ReadRangesExpectedSize":"1"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.level","Aggregation":"{state._yql_agg_0,state._yql_agg_1}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"level\"], HashFunc: \"HashV2\")","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"SortBy":"row.level","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} AST: ( 
(declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)) (TupleType (OptionalType (DataType 'Int32)) (DataType 'Int32)))))) (let $1 '('('"_logical_id" '1320) '('"_id" '"31c45dbf-25285971-fdb63ea0-66c81b5c") '('"_partition_mode" '"single"))) (let $2 (DqPhyStage '() (lambda '() (block '( (let $26 (Int32 '"6")) (let $27 (Just $26)) (let $28 (Int32 '1)) (let $29 '($27 $28)) (let $30 (If (== $26 (Int32 '2147483647)) $29 '((+ $27 $28) (Int32 '0)))) (return (ToStream (Just '((RangeFinalize (RangeMultiply (Uint64 '10000) (RangeUnion (RangeCreate (AsList '($29 $30)))))))))) ))) $1)) (let $3 (DqCnValue (TDqOutput $2 '0))) (let $4 (KqpPhysicalTx '($2) '($3) '() '('('"type" '"compute")))) (let $5 '"%kqp%tx_result_binding_0_0") (let $6 (DataType 'Int32)) (let $7 (OptionalType $6)) (let $8 (TupleType $7 $6)) (let $9 (TupleType (ListType (TupleType $8 $8)))) (let $10 (lambda '($47 $48 $49) $48 $49 $47)) (let $11 '('"level" $7)) (let $12 (StructType '('_yql_agg_0 $6) '('_yql_agg_1 $7) $11)) (let $13 '('('"_logical_id" '1379) '('"_id" '"fdc02df2-e654bbfb-1ed555ba-d9c728ba") '('"_wide_channels" $12))) (let $14 (DqPhyStage '() (lambda '() (block '( (let $31 (KqpTable '"/Root/tableWithNulls" '"72057594046644480:2" '"" '1)) (let $32 '('('"UsedKeyColumns" '('"id")) '('"ExpectedMaxRanges" '1) '('"PointPrefixLen" '1))) (let $33 (KqpBlockReadOlapTableRanges $31 %kqp%tx_result_binding_0_0 '('"id" '"level") '() $32 (lambda '($36) $36))) (let $34 (lambda '($39 $40 $41) $40 $41)) (let $35 (lambda '($42 $43 $44 $45 $46) $45 (Coalesce $46 $44))) (return (DqPhyHashCombine (WideFromBlocks (FromFlow $33)) '-1073741824 (lambda '($37 $38) $38) $34 $35 $10)) ))) $13)) (let $15 (DqCnHashShuffle (TDqOutput $14 '0) '('2) '0 '"HashV2")) (let $16 (StructType '('"column1" $6) '('"column2" $7) $11)) (let $17 '('('"_logical_id" '2376) '('"_id" '"b0f25d1-9f7cc132-3f3c5de7-d57d139e") '('"_wide_channels" $16))) (let $18 (DqPhyStage '($15) (lambda '($50) (block '( (let $51 (lambda '($57 $58 $59 $60) $58 $59)) (let $52 (lambda '($61 $62 $63 $64 $65 $66) $65 (Coalesce $66 $63))) (let $53 (WideCombiner (ToFlow $50) '"" (lambda '($54 $55 $56) $56) $51 $52 $10)) (return (FromFlow (WideSort $53 '('('2 (Bool 'true)))))) ))) $17)) (let $19 (DqCnMerge (TDqOutput $18 '0) '('('2 '"Asc")))) (let $20 (DqPhyStage '($19) (lambda '($67) (FromFlow (NarrowMap (ToFlow $67) (lambda '($68 $69 $70) (AsStruct '('"column1" $68) '('"column2" $69) '('"level" $70)))))) '('('"_logical_id" '2388) '('"_id" '"1dc2d335-1755fa96-427233e1-d04c9ed2")))) (let $21 '($14 $18 $20)) (let $22 '('"level" '"column1" '"column2")) (let $23 (DqCnResult (TDqOutput $20 '0) $22)) (let $24 (KqpTxResultBinding $9 '0 '0)) (let $25 (KqpPhysicalTx $21 '($23) '('($5 $24)) '('('"type" '"scan")))) (return (KqpPhysicalQuery '($4 $25) '((KqpTxResultBinding (ListType $16) '1 '0)) '('('"type" '"scan_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[1,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 27399, MsgBus: 31064 2025-10-24T16:22:15.072186Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824405029942461:2083];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:15.074112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00130d/r3tmp/tmpVtiUFK/pdisk_1.dat 2025-10-24T16:22:15.248840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:15.248865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:15.249755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:15.251883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:15.287884Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27399, node 1 2025-10-24T16:22:15.396335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:15.396346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:15.396348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:15.396383Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:15.452374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31064 TClient is connected to server localhost:31064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:15.858004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:15.868575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:16.076144Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:16.920258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824409324910387:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.920350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.924046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824409324910397:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.924076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.924168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824409324910399:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.924174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:17.100005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:17.220243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:17.220764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:17.220836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:17.221040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:17.221059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:17.221228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:17.221246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:17.221263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:17.221462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:17.221622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:17.221640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:17.221829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:17.221847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824413619877751:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:17.231451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:17.231468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:17.231632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:17.231637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:17.231818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:17.231823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:17.231833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:17.231837Z node 1 :TX_COLUMNSHARD WARN: log. ... 
D WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:26.316637Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:26.316643Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:26.316659Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:26.316664Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:26.316676Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:26.316680Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:26.320355Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824454303194756:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=20016580422864;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322946320;max=18446744073709551615;plan=0;src=[7:7564824450008227105:2155];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:26.321985Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:26.321994Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:26.321997Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:26.331476Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194831:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.331495Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.331615Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194834:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.331622Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.333612Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:26.336586Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:26.343494Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194863:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.343519Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.343962Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:26.344488Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194868:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.344590Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.348166Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:26.360231Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194896:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.360258Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.360416Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194901:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.360426Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824454303194902:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.360479Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.361379Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:26.364581Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:26.364658Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824454303194905:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:26.420796Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824454303194956:2441] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:26.711415Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,0,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,0,1000000,0] >> KqpOlapJson::CompactionVariants[10,false,1,10,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1,10,100,0.5] >> KqpOlapJson::OrFilterVariants[2,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,100,0] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,0,0.5,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,1000,0,CATEGORY_BLOOM_FILTER] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> KqpQueryPerf::Update+QueryService-UseSink >> KqpOlapJson::SimpleExistsVariants[10,false,1,0,0,0.5] [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,1000,0,0.5] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,0,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,0,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,0,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,1000000,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,1000000,0.5] >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,1000000,0] [GOOD] >> 
KqpOlapJson::QuotedFilterVariants[2,true,1,1000,1000000,0.5] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,0,0.5] >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,1000,0,0.5] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,1000,100,0] >> KqpOlap::RandomJsonCharacters [GOOD] >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-CSV [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,10,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,10,1000000,0] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,1000000,0.5] >> KqpOlapJson::DoubleFilterVariants[10,false,1,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,0,1000000,0] >> KqpOlap::DuplicatesInIncomingBatch [GOOD] >> KqpOlap::DoubleOutOfRangeInJson ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariants[2,true,1,0,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 30802, MsgBus: 10775 2025-10-24T16:22:13.280967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:13.281849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824399107103069:2261];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:13.282056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001313/r3tmp/tmpRPF5PK/pdisk_1.dat 2025-10-24T16:22:13.542560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:13.547945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:13.552183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:13.577453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:13.590505Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:13.592071Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824399107102832:2081] 
1761322933227501 != 1761322933227504 TServer::EnableGrpc on GrpcPort 30802, node 1 2025-10-24T16:22:13.728184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:13.728195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:13.728197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:13.728229Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:13.749171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10775 TClient is connected to server localhost:10775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:14.231552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:14.254022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:14.261571Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:15.184896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824407697038112:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.184916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.185017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824407697038122:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.185021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.298071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:15.347059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.352138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.352441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.352870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.353048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.353068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.353241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.353265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.353286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.353434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:15.353457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 
2025-10-24T16:22:15.353479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:15.353500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:15.353520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824407697038188:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:15.359144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.359230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.359257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.359279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.359304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.359325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824407697038200:2326];tablet_id=7207518622 ... pp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:26.674561Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:26.674563Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:26.691165Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794304:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.691183Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.691301Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794306:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.691309Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.693580Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:26.698659Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:26.698783Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:26.714750Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794340:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.714772Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.714908Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794343:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.714926Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.716098Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:26.720190Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:26.720368Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:26.732023Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794377:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.732050Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.732221Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794382:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.732232Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824451356794383:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.732291Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.733127Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:26.735552Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824451356794386:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:26.788802Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824451356794437:2467] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a\":\"a2\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".c") = "1" ORDER BY Col1; 2025-10-24T16:22:27.011473Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".e.\"c.a\"") = "2" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:2/0/0 HEADER:0/0/0 >> KqpOlapJson::QuotedFilterVariants[2,false,1024,0,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,0,1000000,0.5] >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,100,0.5] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,true,0,1000,0,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::OrFilterVariants[2,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 5785, MsgBus: 14106 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001309/r3tmp/tmphFIvAg/pdisk_1.dat 2025-10-24T16:22:17.379928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:17.379980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:17.387845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:17.395538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:17.400899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:17.509585Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:17.511918Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824413426561160:2081] 1761322937007078 != 1761322937007081 TServer::EnableGrpc on GrpcPort 5785, node 1 2025-10-24T16:22:17.552238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:17.582185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:17.582198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:17.582199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:17.582237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14106 TClient is connected to server localhost:14106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:18.054707Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:18.058126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:18.078660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:18.552002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824417721529142:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.552026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.552712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824417721529151:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.552729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.599394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:18.614822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:18.614958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:18.615001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:18.615022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:18.615042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:18.615059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:18.615077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:18.615095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:18.615113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:18.615134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:18.615153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:18.615173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:18.615191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824417721529214:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:18.616643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:18.616660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:18.616674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:18.616679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:18.616699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:18.616704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:18.616715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:18.616719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:18.616726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:18.616731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromC ... 
HARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:27.273048Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824455165318187:2324];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715658;this=54544877841328;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761322947272;max=18446744073709551615;plan=0;src=[7:7564824450870350518:2155];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.274341Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.274352Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.274354Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.274870Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.274891Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.274894Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:27.284935Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318274:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.284957Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.285059Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318276:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.285064Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.287489Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:27.289877Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.290021Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:27.304398Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:27.305056Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318310:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.305108Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.305246Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318322:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.305252Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.306766Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.306985Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:27.315422Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318347:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.315448Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.315641Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318352:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.315648Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824455165318353:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.315653Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.316550Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:27.323529Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824455165318356:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:27.388704Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824455165318407:2465] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE (JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3") OR (JSON_VALUE(Col2, "$.b") = "b1" AND JSON_VALUE(Col2, "$.c") = "c1") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:8/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[10,false,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12492, MsgBus: 29713 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001304/r3tmp/tmp1yu5sJ/pdisk_1.dat 2025-10-24T16:22:18.886323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:18.886415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:18.890617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:18.890646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:18.892168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:18.896128Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:18.903908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824418475415890:2081] 1761322938826061 != 1761322938826064 TServer::EnableGrpc on GrpcPort 12492, node 1 2025-10-24T16:22:18.912635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:18.912644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:18.912645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:18.912681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29713 TClient is connected to server localhost:29713 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:19.028600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:19.039151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:19.065960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:19.308760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824422770383858:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.308780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.308903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824422770383868:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.308909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.352513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:19.405608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:19.405673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:19.405845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:19.405864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:19.405977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:19.405996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:19.406026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:19.406151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:19.406274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:19.406293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:19.406313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:19.406432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:19.406555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824422770384004:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:19.410464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:19.410489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:19.410544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:19.410564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:19.410583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:19.410604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:19.410626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:19.410646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:19.410664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824422770383998:2324];tablet_id=72075186224037896;process=TTxInitSchema::Execute;flin ... 
=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799264Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799284Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799420Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799432Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 
DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799523Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799570Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799629Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.799713Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, 
JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:27.806406Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824459007459323:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.806447Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.806457Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824459007459328:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.806493Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824459007459330:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.806499Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.807820Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:27.814316Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:27.815913Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824459007459331:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:27.880071Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824459007459383:2683] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:28.223563Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,false,1,10,1000000,0.5] >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpBoolColumnShard::TestGroupByBool-EXECUTE_QUERY-DATASHARD-CSV [GOOD] Test command err: Trying to start YDB, gRPC: 28156, MsgBus: 22444 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00130e/r3tmp/tmp2xzCfX/pdisk_1.dat 2025-10-24T16:22:14.930409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:14.930438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:14.931369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:14.931638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:14.967563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:14.970540Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:14.971948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824403003420931:2081] 1761322934820538 != 1761322934820541 TServer::EnableGrpc on GrpcPort 28156, node 1 2025-10-24T16:22:15.072077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:15.072088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:15.072090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:15.072125Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server 
localhost:22444 2025-10-24T16:22:15.192891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:15.383666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:15.408335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:15.835895Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, b Bool, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:16.839079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824411593356213:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.839171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.844060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824411593356223:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.844106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.844567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824411593356225:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:16.844642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:17.084890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:17.197775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:17.199333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:17.199562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:17.199579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:17.199778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:17.200068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:17.200091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:17.200107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:17.200123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:17.200139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:17.200157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:17.200475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:17.200661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824415888323579:2329];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:17.205219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:17.205231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:17.205243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:17.205248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:17.205265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:17.205269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:17.205278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:17.205282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888; ... ce] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.102213Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458990590877:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.102242Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.102415Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458990590882:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.102424Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458990590883:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.102429Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.103454Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:27.106465Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:22:27.106515Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824458990590886:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:27.184640Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824458990590937:2407] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 21686, MsgBus: 9255 2025-10-24T16:22:27.676614Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7564824459010598901:2094];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:27.684334Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:27.684431Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00130e/r3tmp/tmpIfITlg/pdisk_1.dat 2025-10-24T16:22:27.717750Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:27.717774Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:27.720977Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:27.721385Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:27.721778Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:7564824459010598821:2081] 1761322947670035 != 1761322947670038 TServer::EnableGrpc on GrpcPort 21686, node 7 2025-10-24T16:22:27.737073Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:27.737084Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:27.737085Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:27.737118Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9255 2025-10-24T16:22:27.767482Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:27.870086Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:27.873911Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:28.164316Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824463305566792:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.165124Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.166083Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.167298Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824463305566813:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.167391Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.233933Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824463305566903:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.233960Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.234098Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824463305566908:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.234108Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824463305566909:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.234166Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.234972Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:28.237310Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-10-24T16:22:28.237560Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824463305566912:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:22:28.332812Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824463305566963:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[2,true,1024,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 32097, MsgBus: 24638 2025-10-24T16:22:19.054536Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824424771336431:2258];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:19.054603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001301/r3tmp/tmpATwLfp/pdisk_1.dat 2025-10-24T16:22:19.113620Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:19.113630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 32097, node 1 2025-10-24T16:22:19.128746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:19.128756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:19.128758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:19.128799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24638 2025-10-24T16:22:19.160215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:19.160240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:19.163342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24638 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:19.200347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:19.202617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:19.328939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:19.706525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824424771336870:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.706560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.707058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824424771336880:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.707063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.707148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824424771336882:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.707152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.970529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:20.055909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:20.056806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:20.056883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:20.056901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:20.056917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:20.057106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:20.057285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:20.057492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:20.057596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:20.057602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:20.057621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-10-24T16:22:20.057635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:20.057648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:20.057662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:20.057676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:20.057683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:20.057694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:20.057705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:20.057723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824429066304255:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:20.057903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824429066304243:2324];tablet ... 
: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.809941Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.809945Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.813592Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.813809Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.813817Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:27.826122Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:27.826538Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315708:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.826553Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.826913Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315720:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.826921Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.833774Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.833918Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:27.841893Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315744:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.841914Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.841995Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315747:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.842000Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.843569Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:27.847698Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:27.847876Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:27.855932Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315781:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.855965Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.856064Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315786:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.856070Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824458806315787:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.856075Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.856927Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:27.861733Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:27.861811Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824458806315790:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:27.956673Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824458806315841:2467] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:28.223111Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[1,true,1024,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 7014, MsgBus: 11363 2025-10-24T16:22:15.764341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824405516858076:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:15.764357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00130b/r3tmp/tmpPAamGR/pdisk_1.dat 2025-10-24T16:22:16.186932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:16.187142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:16.188220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:16.245623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:16.274286Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:16.275790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824405516858050:2081] 1761322935759711 != 1761322935759714 TServer::EnableGrpc on GrpcPort 7014, node 1 2025-10-24T16:22:16.408331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:16.408342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:16.408343Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:16.408378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:16.449674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11363 2025-10-24T16:22:16.776061Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:16.865643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:16.885449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:17.945654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824414106793328:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:17.946576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:17.947835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824414106793338:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:17.947870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.141325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:18.191277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:18.191797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:18.192019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:18.192043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:18.192063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:18.192085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:18.192198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:18.192216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:18.192353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:18.192373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:18.192391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:18.192410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:18.192528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824418401760690:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:18.195914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:18.195929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:18.195941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:18.195945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:18.195964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:18.195969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:18.196247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:18.196252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:18.196259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormali ... 
er_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:27.664595Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:27.664602Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:27.664612Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:27.664619Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:27.664636Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:27.664642Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:27.664654Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:27.664659Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:27.668418Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824456716983810:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=20651249054368;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322947668;max=18446744073709551615;plan=0;src=[7:7564824456716983452:2154];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.673551Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.673571Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:27.673574Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:27.689529Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983882:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.689552Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.689813Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:27.690061Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983890:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.690084Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.692023Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:27.707786Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983914:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.707815Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.708035Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983917:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.708045Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.710137Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:27.714031Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:22:27.723080Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983947:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.723101Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.723245Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983952:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.723254Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824456716983953:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.723268Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.724057Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:27.729204Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:27.729283Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824456716983956:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:27.798349Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824456716984007:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 2025-10-24T16:22:28.242944Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,100,0] >> KqpOlap::DoubleOutOfRangeInJson [GOOD] >> KqpOlap::CountWithPredicate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::RandomJsonCharacters [GOOD] Test command err: Trying to start YDB, gRPC: 17509, MsgBus: 30183 2025-10-24T16:22:01.923107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824345133221590:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:01.923265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00126c/r3tmp/tmpUJn17Y/pdisk_1.dat 2025-10-24T16:22:02.099421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:02.105293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:02.105319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:02.106136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:02.128268Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:02.128520Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824345133221478:2081] 1761322921913386 != 1761322921913389 TServer::EnableGrpc on GrpcPort 17509, node 1 2025-10-24T16:22:02.208880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:02.208890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:02.208892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:02.209075Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2025-10-24T16:22:02.324420Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30183 TClient is connected to server localhost:30183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:02.420236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:02.459264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-10-24T16:22:02.492376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.492432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.492485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.492507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.492533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-10-24T16:22:02.492555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.492582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.492608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.492628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.492649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:02.492674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:02.492695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:02.492715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824349428189476:2302];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:02.500701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:02.500720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:02.500760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:02.500780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:02.500800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:02.500821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:02.500840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:02.500859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:02.500921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:02.500988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:02.501007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:02.501027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:02.501047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824349428189475:2301];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:02.502581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_fi ... 
x/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:28.505101Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:28.505128Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:28.505179Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:28.505198Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:28.505219Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:28.505238Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:28.505259Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:28.505279Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:28.505299Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:28.505316Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:28.505339Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:28.505357Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:28.505377Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:28.510240Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:28.510257Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:28.510272Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:28.510278Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:28.510300Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:28.510306Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:28.510319Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:28.510324Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:28.510332Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:28.510338Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:28.510345Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:22:28.510350Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:22:28.510377Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:22:28.510383Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:22:28.510397Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:28.510402Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:28.510407Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:28.510413Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:28.510419Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:28.510424Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:28.510437Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:28.510442Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:28.510452Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:28.510456Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:28.513463Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[8:7564824459691686147:2530];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976710673;this=123987088500240;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1761322948513;max=18446744073709551615;plan=0;src=[8:7564824455396716260:2139];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=18;result=not_found; 2025-10-24T16:22:28.522806Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=18;result=not_found; 2025-10-24T16:22:28.522824Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=18;result=not_found; 2025-10-24T16:22:28.522828Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=18;result=not_found; 2025-10-24T16:22:28.533091Z 
node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:28.551507Z node 8 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710674;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=1;to_version=2;diff=Version: 2 UpsertColumns { Id: 2 Name: "json_payload" Type: "JsonDocument" TypeId: 4612 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecZSTD Level: 4 } } StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 20 ColumnsLimit: 1024 ChunkMemoryLimit: 52428800 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Crossed >> KqpOlapJson::BloomMixIndexesVariants[false,true,1,10,1000,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1,10,1000,0.5,BLOOM_FILTER] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,0,0.5] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,0,0] >> KqpOlapJson::FilterVariants[1,true,0,1000,100,0] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,1000,100,0.5] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,0,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,0,100,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,0,0.5] >> KqpOlapJson::DoubleFilterVariants[10,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,0,1000000,0.5] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,0,0] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,1000,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,1000,0,BLOOM_FILTER] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0.5,BLOOM_FILTER] >> KqpOlapJson::CompactionVariants[10,true,1,1000,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:39.940392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:39.940418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:39.940424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:39.940430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:39.940436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:39.940440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:39.940454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:39.940467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:39.940582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:39.940642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:39.955361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:39.955383Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:39.958636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:39.958669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:39.958704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:39.960004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:39.960177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:39.960262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:39.960334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:39.961329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:39.961364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:39.961586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:39.961596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:39.961622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:39.961629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:39.961636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:39.961659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:39.962876Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:39.978717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:39.978789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:39.978833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:39.978839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:39.978889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:39.978899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:39.979533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:39.979562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:39.979607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:39.979613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:39.979617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:39.979621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:39.979921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:39.979930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:39.979934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:39.980338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:39.980344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:39.980348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:39.980354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:39.980781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:39.981151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:39.981210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:39.981401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:39.981420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:39.981426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:39.981469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:39.981474Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:39.981496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:39.981505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:39.981970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:39.981984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ], Recipient [3:319:2304]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-10-24T16:22:27.288399Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-10-24T16:22:27.288429Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-10-24T16:22:27.288453Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:27.288460Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2025-10-24T16:22:27.288466Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-10-24T16:22:27.288470Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2025-10-24T16:22:27.288504Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:319:2304]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:22:27.288591Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3474: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-10-24T16:22:27.288687Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553162, Sender [3:319:2304], Recipient [3:140:2161]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 43 Memory: 124168 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 260 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-10-24T16:22:27.288695Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: 
StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:22:27.288710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0043 2025-10-24T16:22:27.288729Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:22:27.288737Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-10-24T16:22:27.288906Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435080, Sender [3:1065:3006], Recipient [3:319:2304]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvBuildTableStatsResult 2025-10-24T16:22:27.348108Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:27.348136Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:27.348142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-10-24T16:22:27.348166Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:609: Will execute TTxStoreStats, queue# 1 2025-10-24T16:22:27.348172Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-10-24T16:22:27.348203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-10-24T16:22:27.348223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-10-24T16:22:27.348229Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-10-24T16:22:27.348250Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-10-24T16:22:27.348273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:469: Do not want to split tablet 72075186233409546 by size, its table 
already has 1 out of 1 partitions 2025-10-24T16:22:27.348299Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:27.360038Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:27.360071Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:27.360079Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:22:27.708470Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:27.708502Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:27.708521Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:140:2161], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:27.708526Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:28.120034Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:28.120060Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:28.120077Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:140:2161], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:28.120081Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:28.548034Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:28.548061Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:28.548078Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:140:2161], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:28.548084Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:28.967985Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:28.968014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:28.968032Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:140:2161], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:28.968038Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:29.367964Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:29.367990Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:29.368009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:140:2161], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:29.368015Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:29.403982Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:319:2304]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:22:29.782226Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:29.782251Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:29.782269Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:140:2161], Recipient [3:140:2161]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:29.782275Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpOlapJson::CompactionVariants[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,10,1000000,0.5] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,1000000,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[2,true,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 25336, MsgBus: 9768 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001307/r3tmp/tmpzLrFJH/pdisk_1.dat 2025-10-24T16:22:18.662388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:18.662423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:18.662934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:18.662972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:18.665786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:18.677645Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824416477146678:2081] 1761322938579968 != 1761322938579971 2025-10-24T16:22:18.678719Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25336, node 1 2025-10-24T16:22:18.695339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:18.695359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:18.695362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:18.695414Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9768 TClient is connected to server localhost:9768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:18.752389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:18.755148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:18.842582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:19.036157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824420772114653:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.036201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.043880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824420772114663:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.043915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.176668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:19.202980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:19.203042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:19.203085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:19.203107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:19.203134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:19.203164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:19.203188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:19.203210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:19.203230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:19.203250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:19.203271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:19.203294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:19.203317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824420772114724:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:19.204525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:19.204537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:19.204551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:19.204555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:19.204567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:19.204570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:19.204585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:19.204588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:19.204593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:19.204596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:19.204600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline ... 
10-24T16:22:29.103116Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.103121Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.104189Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.104212Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.104216Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:29.119926Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913887:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.119946Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.120095Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913889:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.120103Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.122438Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:29.129499Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:29.129586Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:29.151534Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:29.152094Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913923:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.152240Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.152344Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913935:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.152350Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.162023Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:29.162237Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:22:29.173045Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913960:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.173060Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.173187Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913965:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.173195Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824467735913966:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.173208Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.174036Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:29.176667Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:29.176726Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824467735913969:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:29.252661Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824467735914020:2466] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; 2025-10-24T16:22:29.412024Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[["{\"others\":{\"accessor\":[1,1,1,1],\"size\":[4,4,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[2,2,1,1]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":680,\"h_size\":426,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[1,1],\"size\":[2,11],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\"],\"records\":[1,1]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":640,\"h_size\":386,\"c_size\":0}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[2,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 29760, MsgBus: 14059 2025-10-24T16:22:19.080240Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824422815805197:2256];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:19.080334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001303/r3tmp/tmpNMDldy/pdisk_1.dat 2025-10-24T16:22:19.171086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:19.182407Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:19.182552Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824422815804969:2081] 1761322939076363 != 1761322939076366 TServer::EnableGrpc on GrpcPort 29760, node 1 2025-10-24T16:22:19.196084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:19.196095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:19.196097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:19.196142Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:19.202562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:19.202587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:19.203654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14059 TClient is connected to server localhost:14059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:19.268660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:19.276880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:19.329600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:19.884104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824422815805651:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.884129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.884256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824422815805661:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.884262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:20.080758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:20.104339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:20.195238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:20.195836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:20.196076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:20.196262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:20.196278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:20.196949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:20.196959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:20.196978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:20.196991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:20.197003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:20.197016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:20.197209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:20.197227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:20.197836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:20.198006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:20.198034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:20.198050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:20.198066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824427110773030:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:20.208419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:20.208488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824427110773041:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:20.208507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;se ... 
.562367Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.562370Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.563783Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.563791Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:29.563793Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:29.583213Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:29.583653Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817427:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.583896Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.584085Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817439:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.584094Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.589422Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:29.589555Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:29.599234Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:29.599623Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817463:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.599736Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.599863Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817475:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.599868Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.605069Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:29.605245Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:22:29.615552Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817500:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.615574Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.615688Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817506:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.615694Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824463829817505:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.615708Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.616984Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:29.621644Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:29.621711Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824463829817509:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:29.716758Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824463829817560:2464] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; 2025-10-24T16:22:29.783984Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[["{\"others\":{\"accessor\":[1],\"size\":[2],\"key_names\":[\"\\\"a.b.c\\\"\"],\"records\":[1]},\"columns\":{\"accessor\":[1],\"size\":[11],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[1]},\"o_size\":624,\"h_size\":720,\"c_size\":248}"]];[["{\"others\":{\"accessor\":[1,1,1],\"size\":[4,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[2,1,1]},\"columns\":{\"accessor\":[1],\"size\":[4],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[2]},\"o_size\":656,\"h_size\":760,\"c_size\":272}"]]] INDEX:0/0/0 HEADER:0/0/0 >> SystemView::PartitionStatsFields [GOOD] >> SystemView::QueryStatsAllTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: 2025-10-24T16:21:55.217385Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 9437184 LockedInitializationPath Marker# TSYS32 2025-10-24T16:21:55.217833Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 9437184 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-10-24T16:21:55.218299Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 9437184 TTablet::WriteZeroEntry. 
logid# [9437184:2:0:0:0:0:0] Marker# TSYS01 2025-10-24T16:21:55.220701Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-10-24T16:21:55.220757Z node 1 :TABLET_EXECUTOR INFO: Leader{9437184:2:0} activating executor 2025-10-24T16:21:55.220836Z node 1 :TABLET_EXECUTOR INFO: LSnap{9437184:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 2025-10-24T16:21:55.220861Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema 2025-10-24T16:21:55.220868Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-10-24T16:21:55.220908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit 2025-10-24T16:21:55.220914Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} took 4194304b of static mem, Memory{8388608 dyn 0} 2025-10-24T16:21:55.220948Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 58b annex 0, ~{ } -{ }, 0 gb} 2025-10-24T16:21:55.220956Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} release 4194304b of static, Memory{4194304 dyn 0} 2025-10-24T16:21:55.222778Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-10-24T16:21:55.222793Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} release 4194304b of static, Memory{0 dyn 0} 2025-10-24T16:21:55.223418Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.223614Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.223636Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 9437184 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-10-24T16:21:55.223648Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} commited cookie 2 for step 1 2025-10-24T16:21:55.223727Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.223735Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:2:1:8192:58:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.223798Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} commited 
cookie 1 for step 2 2025-10-24T16:21:55.223959Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 9437184 Active! Generation: 2, Type: Dummy started in 5msec Marker# TSYS24 2025-10-24T16:21:55.224297Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxWrite} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxWrite 2025-10-24T16:21:55.224307Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxWrite} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-10-24T16:21:55.224332Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxWrite} hope 1 -> done Change{2, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-10-24T16:21:55.224339Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:3} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxWrite} release 4194304b of static, Memory{0 dyn 0} 2025-10-24T16:21:55.224687Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.224699Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:3:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.224717Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:4} commited cookie 1 for step 3 2025-10-24T16:21:55.225026Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:2:4:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-10-24T16:21:55.225041Z node 1 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:5} commited cookie 8 for step 4 2025-10-24T16:21:55.225197Z node 2 :TABLET_MAIN DEBUG: tablet_sys.cpp:876: Tablet: 9437184 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-10-24T16:21:55.226329Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:421: TabletId# 9437184 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [9437184:2:4:0:0:41:0] Snap: 2:1 for 9437184 Marker# TRRH04 2025-10-24T16:21:55.226345Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:4:0:0:41:0], refs: [] for 9437184 2025-10-24T16:21:55.226620Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:1:0:0:42:0], refs: [[9437184:2:1:1:28672:35:0],] for 9437184 2025-10-24T16:21:55.226632Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:2:0:0:71:0], refs: [[9437184:2:2:1:8192:58:0],] for 9437184 2025-10-24T16:21:55.226640Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:3:0:0:69:0], refs: [[9437184:2:3:1:24576:72:0],] for 9437184 2025-10-24T16:21:55.226649Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:625: TabletId# 9437184 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 2 from 1 with 4 steps Marker# TRRH09 2025-10-24T16:21:55.226656Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:729: 
TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[9437184:2:1:1:28672:35:0],] for 9437184 2025-10-24T16:21:55.226662Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:729: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[9437184:2:2:1:8192:58:0],] for 9437184 2025-10-24T16:21:55.226667Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:729: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[9437184:2:3:1:24576:72:0],] for 9437184 2025-10-24T16:21:55.226672Z node 2 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:691: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [] for 9437184 2025-10-24T16:21:55.226712Z node 2 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 9437184 TTablet::WriteZeroEntry. logid# [9437184:3:0:0:0:0:0] Marker# TSYS01 2025-10-24T16:21:55.226870Z node 2 :TABLET_EXECUTOR DEBUG: flat_load_blob_queue.cpp:110: Leader{9437184:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [9437184:2:1:1:28672:35:0] } 2025-10-24T16:21:55.227190Z node 2 :TABLET_EXECUTOR DEBUG: flat_load_blob_queue.cpp:110: Leader{9437184:3:-} sending TEvGet batch 58 bytes, 58 total, blobs: { [9437184:2:2:1:8192:58:0] } 2025-10-24T16:21:55.227479Z node 2 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:3:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-10-24T16:21:55.227725Z node 2 :TABLET_EXECUTOR DEBUG: flat_load_blob_queue.cpp:110: Leader{9437184:3:-} sending TEvGet batch 72 bytes, 72 total, blobs: { [9437184:2:3:1:24576:72:0] } 2025-10-24T16:21:55.228125Z node 2 :TABLET_EXECUTOR INFO: Leader{9437184:3:0} activating executor 2025-10-24T16:21:55.228202Z node 2 :TABLET_EXECUTOR INFO: LSnap{9437184:3, on 3:1, 94b, wait} done, Waste{2:0, 130b +(0, 0b), 4 trc} 2025-10-24T16:21:55.228220Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema 2025-10-24T16:21:55.228229Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-10-24T16:21:55.228251Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit 2025-10-24T16:21:55.228256Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} took 4194304b of static mem, Memory{8388608 dyn 0} 2025-10-24T16:21:55.228268Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-10-24T16:21:55.228275Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} release 4194304b of static, Memory{4194304 dyn 0} 2025-10-24T16:21:55.230060Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 
2025-10-24T16:21:55.230072Z node 2 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} release 4194304b of static, Memory{0 dyn 0} 2025-10-24T16:21:55.230118Z node 2 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 9437184 Active! Generation: 3, Type: Dummy started in 2msec Marker# TSYS24 2025-10-24T16:21:55.230937Z node 2 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:3:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.230961Z node 2 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [9437184:3:1:1:28672:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:21:55.231012Z node 2 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 9437184 GcCollect 0 channel, tablet:gen:step => 3:0 Marker# TSYS28 2025-10-24T16:21:55.2 ... action_gen0 00000.002 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [59:30:2062]) to queue queue_compaction_gen0 00000.002 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.002 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.002 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.002 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 5 00000.002 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {tx status + 1 parts epoch 2} done 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.006 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [59:30:2062]) (release resources {1, 0}) 00000.006 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting until compacted 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 
4194304b of static, Memory{0 dyn 0} ...hasTxData = 1 ...compacting 00000.006 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Mem 00000.006 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (2 by [59:30:2062]) priority=5 resources={1, 0} 00000.006 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.006 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [59:30:2062]) from queue queue_compaction_gen0 00000.006 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.006 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.371094 (insert task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 2, edge 9223372036854775807/0, generation 0 00000.006 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.006 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.006 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.006 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 7, product {0 parts epoch 2} done 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.006 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.006 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [59:30:2062]) (release resources {1, 0}) 00000.006 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.371094 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting until compacted 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 0 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.006 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| 
TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} release 4194304b of static, Memory{0 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.007 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 478b +(3, 191b), 9 trc, -191b acc} 00000.007 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.007 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 258 bytes, 258 total, blobs: { [1:2:2:1:8192:84:0], [1:2:6:1:32768:124:0], [1:2:8:1:32768:50:0] } 00000.007 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 384 bytes, 384 total, blobs: { [1:2:5:1:12288:158:0], [1:2:3:1:24576:78:0], [1:2:4:1:24576:65:0], [1:2:9:1:24576:83:0] } 00000.007 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.007 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.007 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.007 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.007 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 239b, wait} done, Waste{2:0, 478b +(3, 191b), 9 trc} 00000.007 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ... 
checking rows 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} pin 0 (0 b) load 1 (55 b) 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} postponed, loading 1 pages, 55 bytes, newly pinned 0 pages, 0 bytes 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:158:0] ok OK}, type 1 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} activated 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.008 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.008 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 478b +(0, 0b), 1 trc, -191b acc} 00000.008 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 91b} miss {0 0b} 00000.008 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.008 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {798b, 12} 00000.008 II| FAKE_ENV: DS.1 gone, left {717b, 5}, put {1117b, 11} 00000.008 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.008 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.008 II| FAKE_ENV: All BS storage groups are stopped 00000.008 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.008 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 126}, stopped >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,1000000,0] >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,100,0.5] >> KqpOlapJson::CompactionVariants[10,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariants[1,true,0,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,0,0.5] >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,100,0] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,1000000,0] 
[GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,100,0.5] >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,0,0] >> KqpQueryPerf::Replace+QueryService-UseSink >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> KqpOlapJson::CompactionVariants[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,1000,0,0] >> KqpOlapJson::DuplicationCompactionVariants[1,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,100,0] >> KqpOlapJson::QuotedFilterVariants[1,true,1024,0,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariants[1,true,0,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 26830, MsgBus: 9498 2025-10-24T16:22:19.021172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:19.021217Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824421228218132:2270];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:19.021228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001302/r3tmp/tmpyKcMMl/pdisk_1.dat 2025-10-24T16:22:19.041431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:19.041457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:19.042597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:19.055333Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:19.058292Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824421228217876:2081] 1761322939016263 != 1761322939016266 TServer::EnableGrpc on GrpcPort 26830, node 1 2025-10-24T16:22:19.066994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:19.067016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:19.067018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:19.067056Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9498 2025-10-24T16:22:19.121166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:19.144466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:19.147477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:19.576734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824421228218547:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.576767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.577110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824421228218557:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.577118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:19.647760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:19.689105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:19.690212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:19.690276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:19.690297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:19.690317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:19.690339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:19.690675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:19.690697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:19.690716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:19.690736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:19.690756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:19.690927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:19.690948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824421228218612:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:19.700843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:19.700861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:19.701046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:19.701051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:19.701222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:19.701227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:19.701238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:19.701243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:19.701423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:19.701430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description ... 
ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:32.182783Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:32.182795Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:32.182799Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:32.185514Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824479281127053:2322];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=92323505592960;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322952185;max=18446744073709551615;plan=0;src=[7:7564824474986159409:2155];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:32.190421Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:32.190438Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:32.190441Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:32.205400Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127131:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.205417Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.205545Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127134:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.205552Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.207133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:32.217350Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:32.237121Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127163:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.237149Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.237324Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127166:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.237332Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.253355Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:32.257374Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:32.266734Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127196:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.266753Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.266888Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127201:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.266895Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824479281127202:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.266908Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.267604Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:32.277178Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824479281127205:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:32.300291Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:32.376862Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824479281127265:2441] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a\":\"a2\"}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".c") = "1" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"e.v\".e.\"c.a\"") = "2" ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]]] INDEX:1/0/0 HEADER:0/0/0 >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,0,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,0,0.5,CATEGORY_BLOOM_FILTER] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[10,false,1,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 8682, MsgBus: 29850 2025-10-24T16:22:13.465107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824397750749176:2138];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:13.465494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001311/r3tmp/tmpQrzUmW/pdisk_1.dat 2025-10-24T16:22:13.838542Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:13.838582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:13.839148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:13.854786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:13.874947Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:13.875962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824397750749075:2081] 1761322933437493 != 1761322933437496 TServer::EnableGrpc on GrpcPort 8682, node 1 2025-10-24T16:22:13.976196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:13.976206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:13.976207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:13.976242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:14.089036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29850 TClient is connected to server localhost:29850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:14.424720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:14.453905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:14.465956Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:15.536637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824406340684357:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.536738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.544109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824406340684368:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.544140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.552000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824406340684371:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.552020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:15.681100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:15.744959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.745021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.745081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.745103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.745125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.745147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:15.745170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:15.747511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824406340684501:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:15.747550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824406340684501:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:15.747685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:15.747712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-10-24T16:22:15.747733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:15.747756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:15.747779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:15.747804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824406340684515:2333];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:15.755911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824406340684501:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:15.755963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824406340684501:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:15.755982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824406340684501:2326];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:15.755999Z node 1 :TX_COLUMNSHARD WARN: l ... 
SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:32.737882Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:32.737885Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:32.737888Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:32.737949Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951787;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:32.737960Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951822;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:32.737964Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951850;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:32.738000Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[7:7564824473511044879:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322951878;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322951878 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { 
SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:32.738952Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:32.738960Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:32.738963Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:32.739051Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951787;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:32.739058Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951822;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:32.739062Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951850;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:32.739103Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824473511044877:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322951878;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322951878 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:32.739216Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:32.739218Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:32.739221Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:32.739288Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951787;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:32.739294Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951822;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:32.739297Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322951850;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:32.739325Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824473511044875:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322951878;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322951878 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8715, MsgBus: 30043 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000db8/r3tmp/tmp9oCM3P/pdisk_1.dat 2025-10-24T16:22:28.479752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824461143203730:2085];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:28.479765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:28.486802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:28.844716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:28.844741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:28.852444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:28.869940Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:28.872936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 8715, node 1 2025-10-24T16:22:29.016256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:29.016268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:29.016271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:29.016496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30043 TClient is connected to server localhost:30043 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:29.484475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:29.505463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:29.516888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:29.535728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:29.789777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:30.125058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:30.227346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:30.749791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824469733139921:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.749814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.749921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824469733139931:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.749937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.230928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.270897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.321654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.395269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.447678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.521784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.576824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.673757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.795274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824474028108103:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.795303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.795425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824474028108108:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.795439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824474028108109:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.795456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.798248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:31.818672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824474028108112:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:31.879494Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824474028108164:3575] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpOlapJson::CompactionVariants[10,true,1,1000,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1,1000,100,0.5] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1,10,1000,0.5,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,1000000,0.5] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,1000,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,100,0.5] >> KqpOlapJson::SimpleExistsVariants[1,false,0,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,0,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24409, MsgBus: 9606 2025-10-24T16:22:16.608207Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824412105824171:2269];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:16.608278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00130a/r3tmp/tmpxHrrxv/pdisk_1.dat 2025-10-24T16:22:16.919542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:16.919564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:16.925908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:16.931817Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:16.933681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:16.935924Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824412105823917:2081] 1761322936601441 != 1761322936601444 TServer::EnableGrpc on GrpcPort 24409, node 1 2025-10-24T16:22:17.008267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:17.008281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will 
try to initialize from file: (empty maybe) 2025-10-24T16:22:17.008283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:17.008332Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:17.208026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9606 TClient is connected to server localhost:9606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:17.454956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:17.479937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:17.606209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:18.324360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824420695759193:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.324419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.324524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824420695759203:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.324529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:18.373742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:18.405806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:18.405926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:18.405969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:18.405991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:18.406026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:18.406057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:18.406079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:18.406103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:18.406123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:18.406144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:18.406166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:18.406188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:18.406221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824420695759315:2324];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:18.407357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:18.407373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:18.407401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:18.407421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:18.407441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:18.407461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:18.407481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564824420695759337:2328];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:18.407500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self ... 
: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824480063831773:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952823;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:22:33.510772Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[6:7564824480063831773:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322952844;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761322952844 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:33.510863Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824480063831767:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:33.510865Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824480063831767:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:33.510902Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824480063831767:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952564;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:33.510907Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824480063831767:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952613;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:33.510929Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564824480063831767:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322952823;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761322952823 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: 
COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:33.511010Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:33.511012Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:33.511015Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:33.511064Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952564;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:33.511070Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952613;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:33.511073Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952823;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:22:33.511101Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564824480063831879:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322952844;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761322952844 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:33.511183Z node 6 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:33.511185Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:33.511187Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:33.511252Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952564;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:33.511258Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952613;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:33.511262Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322952823;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:22:33.511290Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564824480063831783:2330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322952844;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761322952844 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,0,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,1000000,0] >> KqpQueryPerf::Upsert-QueryService-UseSink >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2555, MsgBus: 13660 2025-10-24T16:22:27.774754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:27.775462Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824455342020962:2252];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:27.775497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000dcf/r3tmp/tmpM8m4ZG/pdisk_1.dat 2025-10-24T16:22:28.036062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:28.050621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:28.050649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:28.065169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:28.147324Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:28.149633Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824455342020745:2081] 1761322947732417 != 1761322947732420 TServer::EnableGrpc on GrpcPort 2555, node 1 2025-10-24T16:22:28.210225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:28.295793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:28.295808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:28.295810Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:28.295878Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13660 TClient is connected to server localhost:13660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:28.548478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:28.589122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:28.740370Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:28.815695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:29.240050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:29.365297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:30.684957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824468226924304:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.685011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.692019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824468226924314:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.692049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.393530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.496949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.573938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.636318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.690977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.751389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.814634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:31.947984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:32.132228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824476816859785:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.132257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.132419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824476816859790:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.132426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824476816859791:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.132484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.133546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:32.169144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824476816859794:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:32.225911Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824476816859846:3571] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:32.756162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824455342020962:2252];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:32.756194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Upsert+QueryService-UseSink >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::IndexInsert+QueryService-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BloomCategoryIndexesVariants[false,false,1024,0,1000,0,BLOOM_FILTER] [GOOD] Test command err: Trying to start YDB, gRPC: 22058, MsgBus: 20429 2025-10-24T16:22:10.851463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824384289654379:2095];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:10.852716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001319/r3tmp/tmpH5Dikg/pdisk_1.dat 2025-10-24T16:22:10.963434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:10.963457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:10.964500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:10.972625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:10.982545Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22058, node 1 2025-10-24T16:22:11.016351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:11.016362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:11.016364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:11.016400Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20429 2025-10-24T16:22:11.197397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions TClient is connected to server localhost:20429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:11.263773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:11.273493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:11.860065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:12.350498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824392879589577:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.350532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.350657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824392879589586:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.350666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:12.664202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:12.725289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.725364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.725415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.725763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.725933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.725951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:12.726131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:12.726151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:12.726170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:12.726189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:12.726208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:12.726572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:12.726591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824392879589647:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:12.731719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:12.740204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:12.740266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:12.740284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:12.740301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:12.740317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:12.740334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:12.740352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824392879589661:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:1 ... 
f0-96666606-5f034030;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a1" ORDER BY Col1; COMPARE: [[1u;["{\"a.b.c\":\"a1\"}"]]] OUTPUT: [[1u;["{\"a.b.c\":\"a1\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=DROP_INDEX, NAME=a_index) 2025-10-24T16:22:34.454473Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:34.469677Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715672;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=5;to_version=6;diff=Version: 6 DefaultCompression { } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:34.469811Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715672;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=5;to_version=6;diff=Version: 6 DefaultCompression { } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "1a1" ORDER BY Col1; COMPARE: [[11u;["{\"a.b.c\":\"1a1\"}"]]] OUTPUT: [[11u;["{\"a.b.c\":\"1a1\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_INDEX, NAME=b_index, TYPE=CATEGORY_BLOOM_FILTER, FEATURES=`{"column_name" : "Col2", "false_positive_probability" : 0.01}`) 2025-10-24T16:22:34.610952Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:34.617332Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=6;to_version=7;diff=Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:34.617477Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=6;to_version=7;diff=Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: 
"CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") = "1b4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a4" ORDER BY Col1; 2025-10-24T16:22:34.924557Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824478492736709:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:34.924578Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824478492736709:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=6;to=7; 2025-10-24T16:22:34.924683Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824478492736709:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322954517;tx_id=281474976715672;;is_diff=1;version=6; 2025-10-24T16:22:34.924787Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824478492736709:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322954664;tx_id=281474976715674;;new_schema=Id: 0 SinceStep: 1761322954664 SinceTxId: 281474976715674 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 7 Indexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 
Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:34.927982Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824478492736707:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:34.928006Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824478492736707:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=6;to=7; 2025-10-24T16:22:34.928106Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824478492736707:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322954517;tx_id=281474976715672;;is_diff=1;version=6; 2025-10-24T16:22:34.928214Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824478492736707:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322954664;tx_id=281474976715674;;new_schema=Id: 0 SinceStep: 1761322954664 SinceTxId: 281474976715674 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 7 Indexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:34.928396Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824478492736707:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:34.929017Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824478492736709:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: 
[[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d111\"") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 >> KqpOlapJson::CompactionVariants[2,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,100,0.5] >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpWorkload::STOCK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24608, MsgBus: 9665 2025-10-24T16:22:27.370587Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824456298431026:2140];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:27.370651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:27.376549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000dfe/r3tmp/tmpBxPynf/pdisk_1.dat 2025-10-24T16:22:27.411253Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:27.411457Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824456298430916:2081] 1761322947369911 != 1761322947369914 TServer::EnableGrpc on GrpcPort 24608, node 1 2025-10-24T16:22:27.438988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:27.439002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:27.439004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:27.439040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9665 2025-10-24T16:22:27.480446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:27.480471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:27.484223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:27.504276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:27.539233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:27.544765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:27.560521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:27.591779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:27.619694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:22:27.642496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:27.998127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824456298432559:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.998150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.998308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824456298432569:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.998315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.304651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.322021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.354606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.372901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:28.397506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.433859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.492153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.551489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.662683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:28.787955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824460593400738:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.788004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.788251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824460593400743:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:28.788259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824460593400744:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10- ... ffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:33.496782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:33.500488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:33.509593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:33.534287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:33.587375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:33.617926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:34.040132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824485153225129:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.040159Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.045479Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824485153225139:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.045539Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.057069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.081674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.099460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.123101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.139238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.172458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.195298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.210415Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:34.233533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.273196Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824485153226013:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.273217Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.273322Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824485153226018:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.273328Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824485153226019:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.273332Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:34.274331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:34.278239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:22:34.278354Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824485153226022:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:34.348742Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824485153226074:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:34.919273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.967597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:34.990590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,100,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,1000000,0] >> KqpQueryPerf::Replace-QueryService-UseSink >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0.5,BLOOM_FILTER] [GOOD] |52.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlap::CountWithPredicate [GOOD] >> KqpOlap::DropTable |52.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> TFileStoreWithReboots::CreateAlterNoVersion >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,1000000,0.5] >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpOlapJson::SimpleExistsVariants[1,false,1,0,0,0.5] [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,10,1000,0.5,BLOOM_FILTER] [GOOD] Test command err: Trying to start YDB, gRPC: 11920, MsgBus: 11049 2025-10-24T16:22:06.328077Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824368417147567:2257];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:06.328180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001325/r3tmp/tmp82viEy/pdisk_1.dat 2025-10-24T16:22:06.387567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11920, node 1 2025-10-24T16:22:06.399977Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:06.419717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:06.419734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:06.419736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:06.419775Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:06.430488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:06.430517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:06.431713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11049 TClient is connected to server localhost:11049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:06.483316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:06.568417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:06.863431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824368417148021:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.863457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.863562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824368417148030:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.863572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:06.911266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:06.923716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:06.923797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:06.923809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:06.923825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:06.923833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:06.923951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:06.923971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:06.923980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:06.923998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:06.924004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:06.924033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:06.924034Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:06.924058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:06.924058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:06.924084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:06.924089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:06.924107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:06.924115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:06.924139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:06.924148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:06.924171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824368417148107:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:06.924178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824368417148091:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:06.924193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037 ... 
"SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") = "1b4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "%1b4%" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "%1b4%" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "%1B4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a4" ORDER BY Col1; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a\"") = "a4" ORDER BY Col1; 2025-10-24T16:22:35.966458Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824480350746460:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:35.966517Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824480350746460:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=8;to=9; 2025-10-24T16:22:35.966749Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824480350746460:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322954965;tx_id=281474976710676;;is_diff=1;version=8; 2025-10-24T16:22:35.967006Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824480350746460:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322955168;tx_id=281474976710678;;new_schema=Id: 0 SinceStep: 1761322955168 SinceTxId: 281474976710678 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: 
"JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 7 Version: 9 Indexes { Id: 4 Name: "index_ngramm_b" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "\"b.c.d\"" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: false } } Indexes { Id: 5 Name: "index_ngramm_a" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "a" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: true } } Indexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 9 DefaultCompression { } UpsertIndexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:35.967357Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824480350746475:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:35.967371Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824480350746475:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=8;to=9; 2025-10-24T16:22:35.968402Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824480350746475:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322954965;tx_id=281474976710676;;is_diff=1;version=8; 2025-10-24T16:22:35.968585Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824480350746475:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322955168;tx_id=281474976710678;;new_schema=Id: 0 SinceStep: 1761322955168 SinceTxId: 281474976710678 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 
4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 7 Version: 9 Indexes { Id: 4 Name: "index_ngramm_b" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "\"b.c.d\"" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: false } } Indexes { Id: 5 Name: "index_ngramm_a" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "a" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: true } } Indexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 9 DefaultCompression { } UpsertIndexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:35.968875Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824480350746475:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:35.968943Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824480350746460:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d111\"") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "1b3" ORDER BY Col1; COMPARE: [[13u;["{\"b.c.d\":\"1b3\"}"]]] OUTPUT: [[13u;["{\"b.c.d\":\"1b3\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 |52.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> 
TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> KqpOlapJson::CompactionVariants[1,true,1,1000,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,1000,0,0.5] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,100,0] >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] |52.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |52.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |52.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |52.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] |52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |52.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |52.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |52.3%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20191, MsgBus: 13230 2025-10-24T16:22:31.119074Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824475483661620:2164];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:31.119317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:31.166206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d8f/r3tmp/tmp8J0WX5/pdisk_1.dat 2025-10-24T16:22:31.971748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:31.971774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:31.971903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:31.983219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20191, node 1 2025-10-24T16:22:32.119815Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824475483661493:2081] 1761322951092668 != 1761322951092671 2025-10-24T16:22:32.139440Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:32.166997Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:32.212726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:32.232346Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:32.232356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:32.232358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:32.232397Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13230 TClient is connected to server localhost:13230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:33.602860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:33.616243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:33.631436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:33.683157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:34.042112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:34.308515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:35.941102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824492663532348:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:35.941142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:35.941318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824492663532358:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:35.941323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.023110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.030849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.046354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.064625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.085158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.099185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.112557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.118881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824475483661620:2164];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.120424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:22:36.130304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.154601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496958500517:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.154623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.154763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496958500522:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.154772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496958500523:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.154778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.155702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.159955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:22:36.160015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824496958500526:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:36.236081Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824496958500579:3571] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 30656, MsgBus: 8948 2025-10-24T16:22:28.779004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824461231885655:2149];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:28.779028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000dc3/r3tmp/tmpLFotLE/pdisk_1.dat 2025-10-24T16:22:28.995914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:29.031126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:29.031149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:29.044541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:29.108347Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30656, node 1 2025-10-24T16:22:29.277492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:29.289525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:29.289535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:29.289537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:29.289573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8948 TClient is connected to server localhost:8948 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:29.744491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:29.771922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:29.845572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:29.855927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:30.105491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:30.365661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:30.532739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:32.367832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824478411756389:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.372905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.381995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824478411756399:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.382028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:33.014868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.082401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.135676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.179379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.216436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.290178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.357334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.447438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:33.620437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824482706724570:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:33.620461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:33.620608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824482706724575:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:33.620615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824482706724576:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:33.620620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, i ... fication cookie mismatch for subscription [2:7564824493155042354:2081] 1761322955611740 != 1761322955611743 TServer::EnableGrpc on GrpcPort 30907, node 2 2025-10-24T16:22:35.668004Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:35.668015Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:35.668017Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:35.668063Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20167 TClient is connected to server localhost:20167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:35.821205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:35.824196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:35.844657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:35.868180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:35.910116Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:35.948864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:22:35.968922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.088206Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824497450011291:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.088240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.088396Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824497450011301:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.088403Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.092499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.100042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.109072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.127216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.136178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.151562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.164606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.179488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.199256Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7564824497450012168:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.199274Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.199371Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824497450012173:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.199376Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824497450012174:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.199427Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.200134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.206473Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824497450012177:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:36.292262Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824497450012229:3551] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:36.621505Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[1,false,1,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24717, MsgBus: 24580 2025-10-24T16:22:25.629277Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824447489307357:2257];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:25.639124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:25.641100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012fa/r3tmp/tmpC9SDX3/pdisk_1.dat 2025-10-24T16:22:25.838578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:25.838604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:25.841904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:25.863931Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824447489307134:2081] 1761322945573483 != 1761322945573486 2025-10-24T16:22:25.863965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:25.866654Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24717, node 1 2025-10-24T16:22:25.962148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:25.962162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:25.962165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:25.962207Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:26.024622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
TClient is connected to server localhost:24580 TClient is connected to server localhost:24580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:26.243884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:26.246613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:26.562464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824451784275105:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.562493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.562688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824451784275115:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.562696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.610092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:26.631896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:26.637819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:26.638088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:26.638159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:26.638232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:26.638534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:26.638558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:26.638639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:26.638663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:26.639090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:26.639118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:26.639140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:26.639218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:26.639240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824451784275168:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:26.648220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:26.648239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:26.648253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:26.648258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:26.648281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:26.648286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:26.648299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:26.648305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormali ... 
xecute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:22:36.217559Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:22:36.217565Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:22:36.217572Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:36.217578Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:36.217584Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:36.217599Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:36.217605Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:36.217615Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:36.217618Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:36.220736Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564824495318898044:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=126171015214496;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322956220;max=18446744073709551615;plan=0;src=[6:7564824491023930383:2153];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.222944Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.222964Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.222970Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, 
`SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:36.246059Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:36.246640Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898117:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.246783Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.246955Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898129:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.246963Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.249447Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:36.259494Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898149:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.259518Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.259790Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898152:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.259821Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.260106Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:36.263009Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:36.270222Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898182:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.270257Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.270384Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898187:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.270399Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.270412Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564824495318898188:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.271447Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.274866Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564824495318898191:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:36.360757Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564824495318898242:2440] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:36.687107Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TFileStoreWithReboots::CreateDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31369, MsgBus: 19705 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d87/r3tmp/tmpDH95UK/pdisk_1.dat 2025-10-24T16:22:35.592295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:35.607073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:35.607105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:35.612266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:35.620522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:35.624721Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31369, node 1 2025-10-24T16:22:35.647560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:35.647575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:35.647578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:35.647627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19705 2025-10-24T16:22:35.759930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19705 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:35.884373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:35.912332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.025655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.089885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.111919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.198196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496578532451:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.198221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.198356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496578532461:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.198362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.259901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:36.276567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.286876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.296603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.311025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.325790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.340675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.353398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.367729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.387473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496578533324:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.387513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.387520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496578533329:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.387587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496578533331:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.387603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.388368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.393908Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824496578533332:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:36.475789Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824496578533385:3559] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |52.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13830, MsgBus: 14823 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d86/r3tmp/tmpHJug2M/pdisk_1.dat 2025-10-24T16:22:35.276016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:35.276044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:35.277952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:35.321822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:35.321885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:35.326499Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13830, node 1 2025-10-24T16:22:35.460690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:35.460702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:35.460704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:35.460746Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:35.542496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14823 TClient is connected to server localhost:14823 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:35.744959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:35.761792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:22:35.870866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:35.963170Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-10-24T16:22:35.976034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:35.996795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.133289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497664897984:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.133318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.133801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497664897994:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.133812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.199494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.209908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.221089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.234339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.248419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.265160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.278774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.291232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.309067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824497664898865:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.309096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.309167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497664898870:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.309177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497664898871:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.309184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.310005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.317175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824497664898874:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:36.397804Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824497664898935:3564] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,10,0,0.5] >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,1000000,0.5] >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1,1000,1000000,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[10,true,1024,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 25470, MsgBus: 1948 2025-10-24T16:22:21.728132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824432481050793:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:21.731261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012fd/r3tmp/tmpRUBg71/pdisk_1.dat 2025-10-24T16:22:22.028228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:22.028253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:22.032289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:22.057781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:22.089945Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25470, node 1 2025-10-24T16:22:22.256000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:22.256009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:22.256011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:22.256183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:22.308618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2025-10-24T16:22:22.735990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1948 TClient is connected to server localhost:1948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:23.293105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:23.324097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:24.707139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824445365953316:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.712139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.712318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824445365953326:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.712324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.712420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824445365953328:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:24.712425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:25.073994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:25.160885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:25.160989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:25.161075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:25.161103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:25.161122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:25.161148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:25.161170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:25.161193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:25.161214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:25.161233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:25.161256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:25.161275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:25.161301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824449660920758:2331];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:25.162109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824449660920794:2334];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:25.162159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824449660920794:2334];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:25.162199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824449660920794:2334];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:25.162219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824449660920794:2334];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:25.162240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824449660920794:2334];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:25.162261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564824449660920794:2334];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NA ... 
: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.329437Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.329523Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.329605Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.329693Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 
2025-10-24T16:22:36.329858Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.329949Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.330033Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:36.330116Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:36.334738Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] 
ActorId: [7:7564824496083811191:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.334760Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.334846Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824496083811197:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.334888Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824496083811196:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.334909Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.335519Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.337250Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824496083811200:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:36.434001Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824496083811251:2685] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:8/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:36.763136Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,100,0.5] >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18739, MsgBus: 62012 2025-10-24T16:22:35.997329Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824489760713979:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:35.997348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:36.008352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d6d/r3tmp/tmpLELRL4/pdisk_1.dat 2025-10-24T16:22:36.038871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.038896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.039663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:36.046986Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:36.047189Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824489760713942:2081] 1761322955996882 != 1761322955996885 
TServer::EnableGrpc on GrpcPort 18739, node 1 2025-10-24T16:22:36.073380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.073393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.073395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.073436Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62012 2025-10-24T16:22:36.128606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.186478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.192264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:36.197198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.231684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:36.261097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.274078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.456326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494055682884:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.456357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.460052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494055682894:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.460078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.511823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.522303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.536017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.549155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.566591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.577756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.591977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.606312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.627743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824494055683757:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.627771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.627863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494055683763:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.627879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494055683762:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.627896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.628616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.632226Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824494055683766:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:36.708068Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824494055683818:3557] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.000959Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,10,1000000,0.5] [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,0,0] >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,100,0.5] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,0,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,0,0.5,BLOOM_FILTER] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,1000000,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2636, MsgBus: 5177 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d3a/r3tmp/tmp8956h1/pdisk_1.dat 2025-10-24T16:22:36.259932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:36.259968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:36.261040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.261071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.268218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:36.270658Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:36.275960Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824495106476943:2081] 1761322956211443 != 1761322956211446 TServer::EnableGrpc on GrpcPort 2636, node 1 2025-10-24T16:22:36.287228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.287237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.287240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-10-24T16:22:36.287275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5177 TClient is connected to server localhost:5177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.362330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.366088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:36.370468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.393793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:22:36.424778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.428172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:36.441563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.672750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824495106478596:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.672777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.679969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824495106478606:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.679996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.733734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.758584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.782618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.800456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.837575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.856009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.876206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.897468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.936584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824495106479466:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.936613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.936727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824495106479472:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.936732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.936740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824495106479471:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.937707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.942733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:22:36.942823Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824495106479475:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:37.036149Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824499401446823:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.224106Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,1000000,0.5] [GOOD] |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,100,0.5] >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25144, MsgBus: 16716 2025-10-24T16:22:36.157316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824494248885888:2263];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.157392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d53/r3tmp/tmpUS3hcv/pdisk_1.dat 2025-10-24T16:22:36.203979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:36.215579Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:36.216590Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824494248885659:2081] 1761322956149111 != 1761322956149114 TServer::EnableGrpc on GrpcPort 25144, node 1 2025-10-24T16:22:36.232025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.232037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.232038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.232082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16716 2025-10-24T16:22:36.266612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.266641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.267712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.288727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.294341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:22:36.314293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.341524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.367653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:36.382462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.475188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:36.578871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494248887306:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.578923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.579078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494248887316:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.579088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.638843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.648981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.661487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.689241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.704084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.721819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.730465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.746951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.776618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824494248888178:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.776670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.776784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494248888183:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.776786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824494248888184:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.776795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.777815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:36.781825Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824494248888187:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:36.849182Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824494248888239:3555] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.148318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 30783, MsgBus: 9687 2025-10-24T16:22:36.388430Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824495417924492:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.391246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d32/r3tmp/tmp0T3QZT/pdisk_1.dat 2025-10-24T16:22:36.442298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:36.459095Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30783, node 1 2025-10-24T16:22:36.480212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.480231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.480233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.480283Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:36.488833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.488870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.490351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9687 TClient is connected to server localhost:9687 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.549346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.556626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:22:36.570364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.595263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.641120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.662344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:36.727906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:36.888425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824495417926055:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.888457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.888581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824495417926065:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.888586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.009645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.031391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.041636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.060765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.072132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.090265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.109758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.132179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.164107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824499712894224:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.164156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.167658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824499712894229:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.167785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824499712894230:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.167801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.168793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:37.173337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824499712894233:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:37.273457Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824499712894285:3554] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.400051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,0,0.5] >> KqpOlapJson::CompactionVariants[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,true,1,1000,100,0] |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2582, MsgBus: 9705 2025-10-24T16:22:36.620312Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824495846925242:2191];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.620411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000cf6/r3tmp/tmpkyEusP/pdisk_1.dat 2025-10-24T16:22:36.676506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:36.685017Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2582, node 1 2025-10-24T16:22:36.704120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.704131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.704133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.704175Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9705 2025-10-24T16:22:36.721271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.721298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.722393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9705 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.772760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.776222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:36.780963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.815562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.841239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.853389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:36.943638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:37.068435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824500141894001:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.068464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.068642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824500141894011:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.068650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.122552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.143008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.160541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.176517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.186996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.209454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.239975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.260579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.298015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824500141894869:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.298133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.298433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824500141894876:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.298457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824500141894877:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.298483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.299557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:37.304127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:22:37.304178Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824500141894880:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:22:37.393111Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824500141894932:3550] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.621157Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0,BLOOM_FILTER] >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> IndexBuildTestReboots::IndexPartitioningUniq [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,10,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,1000000,0] >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,100,0.5] [GOOD] >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlapJson::CompactionVariants[10,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1,1000,1000000,0.5] >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[1,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 18764, MsgBus: 12306 2025-10-24T16:22:27.108989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824456787053732:2071];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:27.109023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f3/r3tmp/tmpxUHSBI/pdisk_1.dat 2025-10-24T16:22:27.147529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:27.160156Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824456787053692:2081] 1761322947108692 != 1761322947108695 2025-10-24T16:22:27.160832Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18764, node 1 2025-10-24T16:22:27.178570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:27.178583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:27.178585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:27.178626Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12306 TClient is connected to server localhost:12306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:27.222498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:27.222527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:27.223632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:27.227229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:27.408377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:27.569312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824456787054370:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.569337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.569464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824456787054380:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.569473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.618759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:27.638502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:27.638557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:27.638610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:27.638638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:27.638665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:27.638690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:27.638712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:27.638766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:27.638787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:27.638809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:27.638832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:27.638857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:27.638879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456787054435:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:27.648065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:27.648087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:27.648105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:27.648111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:27.648136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:27.648141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:27.648156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:27.648162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:27.648172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:27.648178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:27.648185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... 
log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:22:38.528531Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:22:38.528536Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:22:38.528548Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:22:38.528553Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:22:38.528562Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:38.528566Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:38.537022Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503364890072:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=18615439613184;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322958536;max=18446744073709551615;plan=0;src=[7:7564824499069922417:2156];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:38.545480Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:38.545496Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:38.545498Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:38.559896Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:38.560267Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890144:2335], DatabaseId: /Root, PoolId: default, Failed 
to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.560382Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.560487Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890156:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.560493Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.565141Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:38.587237Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:38.587715Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890176:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.587804Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.587955Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890188:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.587974Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.589315Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:22:38.606186Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890209:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.606208Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.606337Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890215:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.606343Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503364890214:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.606361Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.607220Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:38.610174Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:38.610260Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824503364890218:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:38.700712Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824503364890269:2440] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[1,1,1],\"size\":[6,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[3,1,1]},\"columns\":{\"accessor\":[1],\"size\":[15],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[3]},\"o_size\":672,\"h_size\":760,\"c_size\":280}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:22:38.949778Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,100,0.5] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,1000,1000000,0.5] |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18786, MsgBus: 29665 2025-10-24T16:22:36.288201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824497631559021:2179];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.288228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d69/r3tmp/tmpGuKItJ/pdisk_1.dat 2025-10-24T16:22:36.342695Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:36.342812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824497631558880:2081] 1761322956287093 != 1761322956287096 2025-10-24T16:22:36.345348Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18786, node 1 2025-10-24T16:22:36.354649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2025-10-24T16:22:36.354661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.354663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.354711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29665 TClient is connected to server localhost:29665 2025-10-24T16:22:36.400052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.400086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:36.403721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.410931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.424407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.454522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:36.481255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.496851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.566016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:36.719055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497631560529:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.719083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.719341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497631560539:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.719347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.789419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.799830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.808668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.823864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.836457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.850488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.864487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.879353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.912693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824497631561400:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:36.912724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:36.912895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497631561405:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:36.912904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824497631561406:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:36.912971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.913772Z node 1 :FLAT_TX_SCHEMESHARD ... :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:38.027745Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:38.027746Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:38.027785Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29213 TClient is connected to server localhost:29213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:38.160335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:38.164413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:22:38.232728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:38.256336Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:38.268423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:38.368988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:38.402540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:38.844623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824502359732821:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.844654Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.845899Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824502359732831:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.845906Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.860743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:38.880044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:38.923284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:38.952573Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:38.963740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:38.994907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.024497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.060453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.112632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.173959Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506654701003:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:39.173990Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:39.180290Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506654701008:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:39.180313Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506654701009:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:39.180343Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:22:39.181208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2025-10-24T16:22:39.185711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480
2025-10-24T16:22:39.185780Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824506654701012:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:39.244743Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824506654701064:3560] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,100,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[2,false,1024,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 31180, MsgBus: 13190 2025-10-24T16:22:26.182236Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824452968612179:2071];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:26.182383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f7/r3tmp/tmpWsfL4j/pdisk_1.dat 2025-10-24T16:22:26.326425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:26.348096Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31180, node 1 2025-10-24T16:22:26.351755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:26.351776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:26.355317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:26.364808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:26.364819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:26.364820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:26.364854Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13190 TClient is connected to server localhost:13190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:26.502066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:26.508295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:26.569602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:26.879923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824452968612823:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.879944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.880029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824452968612833:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.880038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.984696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:27.008155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:27.008263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:27.008328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:27.008352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:27.008372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:27.008395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:27.008422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:27.008444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:27.008468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:27.008489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:27.008513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:27.008536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:27.008556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824452968612897:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:27.009271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:27.009298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:27.009343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:27.009369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:27.009400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:27.009419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:27.009440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:27.009460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:27.009479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824457263580205:2324];tablet_id=72075186224037888;process=TTxInitSchema: ... 
9.749368Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:39.749370Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:39.750503Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:39.750510Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:39.750512Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:22:39.778627Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:39.779045Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981365970:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.779205Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.779336Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981365982:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.779342Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.781291Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:39.781415Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:39.793915Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981366006:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.793933Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.794048Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981366008:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.794053Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.800201Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:39.805448Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:39.805599Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:39.812110Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:22:39.826715Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981366052:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.826736Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.826886Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981366057:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.826895Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824509981366058:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.826912Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.827703Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:39.830881Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:39.830947Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824509981366061:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:39.922902Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824509981366112:2468] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1,1,1,1],\"size\":[4,4,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[2,2,1,1]},\"o_size\":0,\"h_size\":431,\"c_size\":1088}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[1,1],\"size\":[2,11],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"b.c.d\\\"\"],\"records\":[1,1]},\"o_size\":0,\"h_size\":381,\"c_size\":488}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,0,0.5,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0,CATEGORY_BLOOM_FILTER] >> TFileStoreWithReboots::Create ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 3132, MsgBus: 10360 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012fe/r3tmp/tmpBSlKUj/pdisk_1.dat 2025-10-24T16:22:20.690361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:20.718502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:20.724028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:20.732366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:20.745409Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824427969699810:2081] 1761322940419554 != 1761322940419557 2025-10-24T16:22:20.745431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:20.764075Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3132, node 1 2025-10-24T16:22:20.856072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:20.952038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:20.952052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:20.952054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:20.952089Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10360 2025-10-24T16:22:21.420516Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:21.593406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:22.645238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824436559635085:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.645282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.645566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824436559635094:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.645577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:23.118548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:23.232354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:23.232435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:23.232524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:23.232543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:23.232562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:23.232583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:23.232600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:23.232616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:23.232632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:23.232648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:23.232664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:23.232686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:23.232704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824440854602449:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:23.244347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:23.244366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:23.244378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:23.244384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:23.244406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:23.244411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:23.244421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:23.244426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:23.244433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:23.244438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:22:23.244446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;even ... ssues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.663141Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.669427Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:38.677659Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:38.677985Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503854524716:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.678045Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.678148Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503854524728:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.678196Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.685218Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:38.693301Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503854524749:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.693319Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.693419Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503854524754:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.693424Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824503854524755:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.693463Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:38.694051Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:38.701075Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824503854524758:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:22:38.764911Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824503854524809:2465] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:38.812112Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:22:38.884346Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:22:38.932170Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:38.940193Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:39.572340Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:39.572356Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:39.572362Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:39.572522Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322958661;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:39.572536Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322958696;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:39.572542Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322958717;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:39.572648Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824503854524579:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322958731;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322958731 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirs |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlapJson::CompactionVariants[1,true,1,1000,100,0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11024, MsgBus: 12935 2025-10-24T16:22:36.273009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:36.273091Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824496015524157:2252];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.273152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d58/r3tmp/tmp1Dc7qK/pdisk_1.dat 2025-10-24T16:22:36.300477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-10-24T16:22:36.300504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.301596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11024, node 1 2025-10-24T16:22:36.314711Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:36.320261Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824496015523940:2081] 1761322956259924 != 1761322956259927 2025-10-24T16:22:36.332053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.332064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.332066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.332105Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12935 2025-10-24T16:22:36.376917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.394535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:36.397993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-10-24T16:22:36.412865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.448504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.472727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.488894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:36.719915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496015525584:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.719942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.720068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496015525594:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.720073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.776514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.785076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.795199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.808667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.824413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.846171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.858215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.872924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.898166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824496015526456:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.898198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.898276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496015526462:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.898287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824496015526461:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.898309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, st ... n: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:38.652420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:38.660708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:38.668246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:38.712460Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:38.715191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:38.778551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:38.837842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:39.276622Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506737520582:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.276645Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.277011Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506737520592:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.277020Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.296001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.319271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.335201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.370507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.393173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.399982Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:39.416328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.433579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.461018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:39.506318Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506737521462:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.506340Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.506496Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506737521467:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.506505Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824506737521468:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.506552Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:39.507265Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:39.511937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:22:39.512010Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824506737521471:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:22:39.591883Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824506737521523:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:40.044583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:40.066818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:40.088010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[10,false,1,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 21840, MsgBus: 22962 2025-10-24T16:22:26.718603Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824454272779840:2074];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:26.718632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:26.722957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f5/r3tmp/tmpIUswmO/pdisk_1.dat 2025-10-24T16:22:26.750319Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21840, node 1 2025-10-24T16:22:26.761439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:26.761455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:26.761456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:26.761499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got 
bad distributable configuration TClient is connected to server localhost:22962 2025-10-24T16:22:26.820082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:26.820106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:26.824073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:26.826146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:26.911301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:26.920218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:27.283393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824458567747760:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.283420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.283529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824458567747770:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.283551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.328678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:27.368707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:27.368791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:27.368841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:27.368860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:27.368878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:27.368901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:27.368928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:27.368949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:27.368974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:27.368997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:27.369018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:27.369041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:27.369061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564824458567747870:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:27.374990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:27.375024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:27.375072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:27.375095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:27.375115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:27.375136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:27.375156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:27.375179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:27.375212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824458567747871:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstr ... 
ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.657872Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.657995Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.658011Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.658132Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.658145Z 
node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.658284Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.658407Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:40.658416Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:22:40.681648Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824515181918422:2394], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:40.681666Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:40.681800Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824515181918427:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:40.681808Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824515181918428:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:40.681849Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:40.682514Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:40.685004Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:40.685063Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824515181918431:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:40.777006Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824515181918482:2688] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:8/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::CompactionVariants[1,true,1,1000,100,0.5] >> TFileStoreWithReboots::CreateAlter >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,100,0.5] [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::ReconnectReplica |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::NotAvailableByShutdown |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,1000000,0.5] >> TBoardSubscriber2DCTest::SimpleSubscriber >> TargetDiscoverer::Transfer |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 5298, MsgBus: 7596 2025-10-24T16:22:28.488315Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824460307689698:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:28.490420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f0/r3tmp/tmpKeGUsQ/pdisk_1.dat 2025-10-24T16:22:28.892365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:28.896424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-10-24T16:22:28.896610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:28.897744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5298, node 1 2025-10-24T16:22:29.032305Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:29.078655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:29.152197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:29.152209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:29.152211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:29.152756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7596 2025-10-24T16:22:29.490939Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:29.729179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:29.744897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:31.513122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824473192592222:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.513158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.513309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824473192592234:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:31.513315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:32.268886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:32.363199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:32.363264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:32.363331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:32.363354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:32.363376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:32.363399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:32.363429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:32.363451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:32.363472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:32.363494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:32.363516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:32.363540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:32.363563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824477487559594:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:32.375838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:32.375887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:32.375947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:32.375964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:32.375981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:32.375997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:32.376183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:32.376204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824477487559605:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T1 ... 
tx_id=281474976710658;this=124984555799696;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761322961934;max=18446744073709551615;plan=0;src=[7:7564824517459451571:2154];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:41.937462Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:41.937475Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:41.937478Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:41.938678Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:41.938686Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:41.938688Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:41.955311Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:41.955885Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452033:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.956049Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.956161Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452045:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.956166Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.958138Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:41.958291Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:41.968941Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:41.969457Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452069:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.969572Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.969686Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452081:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.969693Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.972121Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:41.972313Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:22:41.984237Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452106:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.984256Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.984406Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452111:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.984413Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824517459452112:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.984427Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:41.985113Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:41.987457Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:22:41.987520Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824517459452115:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:22:42.068859Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824521754419462:2464] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:4/0/0 HEADER:0/0/0 2025-10-24T16:22:42.435252Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::DropByDisconnect |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,0,0,0] >> TargetDiscoverer::IndexedTable >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher |52.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |52.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |52.5%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TargetDiscoverer::InvalidCredentials >> KqpOlapJson::CompactionVariants[2,false,1024,1000,1000000,0.5] [GOOD] |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TBoardSubscriber2DCTest::ReconnectReplica >> TChargeBTreeIndex::FewNodes >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TargetDiscoverer::Dirs >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TargetDiscoverer::Negative >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] >> TargetDiscoverer::Basic >> TargetDiscoverer::InvalidCredentials [GOOD] >> TargetDiscoverer::RetryableError |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> 
IndexBuildTestReboots::IndexPartitioningUniq [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.416119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.416143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.416149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.416159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.416166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.416171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.416181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.416196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.416319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.416384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.438722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.438760Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.438869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 
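For reference while reading the KqpOlapJson test blocks above, their captured output boils down to roughly the following YQL sequence (restated from the EXECUTE: statements earlier in this log; this is a minimal sketch only — the literal values, partition counts and SUB_COLUMNS parameters such as COLUMNS_LIMIT or SPARSED_DETECTOR_KFF vary per test variant and are not a definitive reproduction of any single test):

    -- column-store table with a JsonDocument column, as in the CREATE TABLE shown above
    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        PRIMARY KEY (Col1)
    ) PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10);

    -- switch the scan reader policy and enable the SUB_COLUMNS accessor on Col2
    -- (parameter values here are copied from one variant's ALTER OBJECT output above)
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`);
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=ALTER_COLUMN, NAME=Col2,
             `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
             `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`,
             `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`,
             `OTHERS_ALLOWED_FRACTION`=`0.5`);

    -- load a few JSON documents and query them through JSON_VALUE,
    -- matching the REPLACE INTO / SELECT pairs whose COMPARE/OUTPUT results appear above
    REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
        (1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')),
        (2u, JsonDocument('{"a" : "a2"}')),
        (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')),
        (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}'));

    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT * FROM `/Root/ColumnTable`
    WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3"
    ORDER BY Col1;

The recurring "Resource pool default not found" warnings and the subsequent TPoolCreatorActor retry seen around these statements are emitted while the `/Root/.metadata/workload_manager/pools/default` resource pool is still being created on a freshly started test cluster; the test output above shows the pool eventually reported as already existing.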
2025-10-24T16:20:33.441782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.441863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.441892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.444099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.444197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.444284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.444341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.445024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.445057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.445227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.445233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.445255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.445260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.445265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.445282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.446284Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.462489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.462567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-10-24T16:20:33.462620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.462627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.462676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.462702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.463343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.463384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.463428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.463436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.463440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.463444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.463752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.463761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.463765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.464090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.464099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.464102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.464107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.464512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.464915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.464959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.465196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.465222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... ze: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:40.205596Z node 354 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:22:40.205625Z node 354 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 31us result status StatusSuccess 2025-10-24T16:22:40.205750Z node 354 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { 
GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Index" LocalPathId: 4 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:40.205797Z node 354 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:22:40.205834Z node 354 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 37us result status StatusSuccess 2025-10-24T16:22:40.205970Z node 354 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0.5,CATEGORY_BLOOM_FILTER] >> KqpOlapJson::CompactionVariants[1,true,1,1000,100,0.5] [GOOD] |52.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-10-24T16:22:43.710621Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000990/r3tmp/tmpTxa5ZW/pdisk_1.dat 2025-10-24T16:22:43.868470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:43.868504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:43.871026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:43.871076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:43.872167Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:43.874644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:43.876041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824525652974471:2081] 1761322963697766 != 1761322963697769 TClient is connected to server localhost:13782 TServer::EnableGrpc on GrpcPort 1652, node 1 2025-10-24T16:22:43.923946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:43.923957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:43.923960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:43.924003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:22:43.992970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:43.996098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:43.997108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:44.088400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:44.090995Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-10-24T16:22:44.091004Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user }, iteration# 0 |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] >> TargetDiscoverer::Transfer [GOOD] >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[2,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 29027, MsgBus: 62822 2025-10-24T16:22:26.324060Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824454520781704:2093];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:26.325689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f6/r3tmp/tmpNWNE2o/pdisk_1.dat 2025-10-24T16:22:26.386231Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:26.387722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 29027, node 1 2025-10-24T16:22:26.401935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:26.401947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:26.401950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:26.401990Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62822 2025-10-24T16:22:26.424044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:26.424074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:26.428174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62822 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:26.508630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:26.512942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:26.595344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:26.812285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824454520782288:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.812310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.812424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824454520782298:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.812429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.854917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:26.872834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:26.872934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:26.872983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:26.873004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:26.873027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:26.873048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:26.873069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:26.873090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:26.873110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:26.873133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:26.873157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:26.873178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:26.873200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824454520782356:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:26.875225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:26.875253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:26.875298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:26.875324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:26.875351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:26.875377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:26.875402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:26.875429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:26.875453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824454520782360:2324];tablet_id=72075186224037889;process=TTxInitSchema: ... nd or you don't have access permissions } 2025-10-24T16:22:42.147922Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824522358405035:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:42.147958Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824522358405036:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:42.148052Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:42.149231Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:42.152832Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-10-24T16:22:42.152996Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824522358405039:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:22:42.236869Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824522358405090:2496] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:42.257210Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:42.257259Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:22:42.323190Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:42.323257Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:22:42.364264Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=26;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:22:42.446360Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:43.024176Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:43.024192Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:43.024199Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:43.024355Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322962077;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:43.024368Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322962112;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:43.024373Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322962133;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:43.024461Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824518063437522:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322962168;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322962168 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:43.029593Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:43.029609Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:43.029615Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:43.029788Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322962077;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:43.029805Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322962112;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:43.029811Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322962133;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:43.029916Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824518063437526:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322962168;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322962168 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0,BLOOM_FILTER] |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TargetDiscoverer::RetryableError [GOOD] |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TargetDiscoverer::Negative [GOOD] |52.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TTransferTests::Create_Disabled >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,1000000,0.5] [GOOD] >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TargetDiscoverer::SystemObjects >> TargetDiscoverer::IndexedTable [GOOD] |52.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |52.6%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-10-24T16:22:43.470877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824528044160675:2238];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:43.470899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009ad/r3tmp/tmpsZE0j5/pdisk_1.dat 2025-10-24T16:22:43.626529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:43.637780Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:43.785939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:43.785961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:43.792416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12008 TServer::EnableGrpc on GrpcPort 32231, node 1 2025-10-24T16:22:43.896690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:43.970481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:43.970493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:43.970495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:43.970538Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12008 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:44.249752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-10-24T16:22:44.276181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:44.383968Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1761322964366, tx_id: 281474976710658 } } } 2025-10-24T16:22:44.383983Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-10-24T16:22:44.396203Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:168: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-10-24T16:22:44.396216Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:183: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-10-24T16:22:44.396223Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:193: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer 2025-10-24T16:22:44.477073Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TargetDiscoverer::Dirs [GOOD] >> TTransferTests::Create >> TargetDiscoverer::Basic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[1,true,1,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 21030, MsgBus: 21075 2025-10-24T16:22:26.712485Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824451904881842:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:26.712809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-10-24T16:22:26.719612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f4/r3tmp/tmptrcZXj/pdisk_1.dat 2025-10-24T16:22:26.739898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:26.739930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:26.741003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:26.748056Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:26.748238Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824451904881815:2081] 1761322946712185 != 1761322946712188 TServer::EnableGrpc on GrpcPort 21030, node 1 2025-10-24T16:22:26.760023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:26.760031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:26.760032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:26.760072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21075 TClient is connected to server localhost:21075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:26.824576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:26.834360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:26.837084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:27.077758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824456199849782:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.077781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.077886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824456199849792:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.077892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:27.159553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:27.174861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:27.175232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:27.175325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:27.175345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:27.175364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:27.175395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:27.175415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:27.175433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:27.175483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:27.175501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:27.175539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:27.175556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:27.175574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824456199849845:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:27.179305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:27.179321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:27.179345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:27.179350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:27.179384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:27.179390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:27.179401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:27.179406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:27.179415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:22:27.179420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descript ... 
emeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:43.038589Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:43.052360Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824526457328123:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.052386Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.052513Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824526457328125:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.052521Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.056003Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:43.058101Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:43.063955Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824526457328156:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.063988Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.064110Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824526457328161:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.064122Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824526457328162:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.064139Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.064882Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:43.072104Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824526457328165:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:22:43.156358Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824526457328216:2464] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:43.177735Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:22:43.207570Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:22:43.242233Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:22:43.539064Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:44.002050Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:44.002065Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:44.002070Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:44.002298Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322963050;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:44.002317Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322963071;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:44.002322Z node 7 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322963085;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:44.002437Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824522162360691:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322963106;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322963106 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |52.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials |52.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |52.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::RetryableError [GOOD] Test command err: 2025-10-24T16:22:44.580708Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824530811842770:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:44.580735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00096d/r3tmp/tmpT9tXj7/pdisk_1.dat 2025-10-24T16:22:44.623331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:44.635457Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:26159 TServer::EnableGrpc on GrpcPort 28773, node 1 2025-10-24T16:22:44.668102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:44.668115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:44.668117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:44.668159Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:44.681086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:44.681115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:44.682045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:44.726733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:44.730032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:44.730410Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-0 } } } 2025-10-24T16:22:44.730426Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-0 }, iteration# 0 2025-10-24T16:22:44.730452Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-1 } } } 2025-10-24T16:22:44.730460Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-1 }, iteration# 1 2025-10-24T16:22:44.730467Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-2 } } } 2025-10-24T16:22:44.730470Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-2 }, iteration# 2 2025-10-24T16:22:44.730480Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-3 } } } 2025-10-24T16:22:44.730489Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-3 }, iteration# 3 2025-10-24T16:22:44.730494Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-4 } } } 2025-10-24T16:22:44.730497Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-4 }, iteration# 4 2025-10-24T16:22:44.730502Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-5 } } } 2025-10-24T16:22:44.730505Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-5 }, iteration# 5 2025-10-24T16:22:44.777895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> KqpOlapJson::CompactionVariants[10,true,1024,0,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[10,true,1024,0,0,0.5] |52.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-10-24T16:22:44.575489Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824529034603691:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:44.575624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00097e/r3tmp/tmpKgq6wj/pdisk_1.dat 2025-10-24T16:22:44.624113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:44.636481Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:18690 TServer::EnableGrpc on GrpcPort 26811, node 1 2025-10-24T16:22:44.684084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:44.684109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:44.688099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:44.688113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:44.688115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:44.688159Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:44.688211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18690 WaitRootIsUp 'Root'... 
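For readability, the statement sequence driven by the KqpOlapJson::CompactionVariants[1,true,1,1000,100,0.5] run above is collected here. The statements are copied from that test's EXECUTE: lines; only the grouping and the comments are added, so treat this as a recap of the log rather than an independent reference.

-- Create a column-store table with a JsonDocument payload column.
CREATE TABLE `/Root/ColumnTable` (
    Col1 Uint64 NOT NULL,
    Col2 JsonDocument,
    PRIMARY KEY (Col1)
) PARTITION BY HASH(Col1)
WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1);

-- Switch Col2 to the SUB_COLUMNS data accessor with the parameters under test
-- (columns limit 1, sparsed detector kff 1000, chunk memory limit 100, others fraction 0.5).
ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (
    ACTION=ALTER_COLUMN, NAME=Col2,
    `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
    `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`,
    `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`,
    `OTHERS_ALLOWED_FRACTION`=`0.5`);

-- Load a few JSON documents, including a row with a NULL Col2.
-- (A second analogous REPLACE with keys 11u..14u follows in the log.)
REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
    (1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')),
    (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}'));
REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (10u);

-- After waiting for compaction, read everything back and compare with the expected rows
-- (see the COMPARE/OUTPUT lines above).
PRAGMA OptimizeSimpleILIKE;
PRAGMA AnsiLike;
SELECT * FROM `/Root/ColumnTable` ORDER BY Col1;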
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:44.740473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:44.750354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:44.761173Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-10-24T16:22:44.761195Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential |52.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |52.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |52.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-10-24T16:22:43.607143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824524138440327:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:43.607220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0009b2/r3tmp/tmpKaidol/pdisk_1.dat 2025-10-24T16:22:43.766871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:43.766903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:43.768213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:43.773536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:43.812782Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:25760 TServer::EnableGrpc on GrpcPort 14792, node 1 2025-10-24T16:22:44.039959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:44.120109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:44.120121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:44.120123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:44.120178Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25760 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:44.281838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:44.290494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:44.348847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:44.609006Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:44.725866Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1761322964338, tx_id: 1 } } } 2025-10-24T16:22:44.725880Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-10-24T16:22:44.738237Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322964674, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-10-24T16:22:44.738255Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-10-24T16:22:45.034024Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322964674, tx_id: 281474976715658 } } } 2025-10-24T16:22:45.034040Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-10-24T16:22:45.034046Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-10-24T16:22:45.034062Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:142: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> KqpOlap::DropTable [GOOD] |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-10-24T16:22:44.683335Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824530530595212:2074];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:44.686507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:44.701567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000995/r3tmp/tmp5BLoS3/pdisk_1.dat 2025-10-24T16:22:44.844415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:44.844440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:44.845471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:44.873498Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:44.876716Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824530530595177:2081] 1761322964673746 != 1761322964673749 2025-10-24T16:22:44.876991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8654 TServer::EnableGrpc on GrpcPort 9980, node 1 2025-10-24T16:22:45.047363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:45.047381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:45.047383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:45.047431Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:45.115888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
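The TargetDiscoverer runs in this log (Basic, Dirs, IndexedTable, Negative, RetryableError) exercise the replication controller's discovery phase: describe the source path, list directories, and add srcPath/dstPath targets such as /Root/Table -> /Root/Replicated/Table and /Root/Table/Index -> /Root/Replicated/Table/Index/indexImplTable. The statement below is only an illustrative sketch of the kind of DDL that drives this path; it does not appear in this log, and the endpoint, database, and secret name are placeholders.

-- Illustrative only: replicate /Root/Table into /Root/Replicated.
-- Connection string and secret name are placeholders, not values from this log.
CREATE ASYNC REPLICATION replication_example
FOR `/Root/Table` AS `/Root/Replicated/Table`
WITH (
    CONNECTION_STRING = 'grpcs://source-cluster.example:2135/?database=/Root',
    TOKEN_SECRET_NAME = 'replication_token'
);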
2025-10-24T16:22:45.126096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:45.186010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:45.243562Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1761322965164, tx_id: 1 } } } 2025-10-24T16:22:45.243574Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-10-24T16:22:45.245032Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322965255, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-10-24T16:22:45.245040Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-10-24T16:22:45.354908Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322965255, tx_id: 281474976715658 } } } 2025-10-24T16:22:45.354926Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-10-24T16:22:45.354932Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-10-24T16:22:44.134955Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824531723566414:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:44.134980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000981/r3tmp/tmpvc3PW5/pdisk_1.dat 2025-10-24T16:22:44.196759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:44.236247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:44.236283Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:44.243598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:44.253604Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:7767 TServer::EnableGrpc on GrpcPort 4151, node 1 2025-10-24T16:22:44.444277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:44.564106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:44.564118Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:44.564127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:44.564173Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:44.718568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:44.724112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:22:44.734831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:44.797231Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1761322964765, tx_id: 1 } } } 2025-10-24T16:22:44.797247Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-10-24T16:22:44.798125Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1761322964779, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-10-24T16:22:44.798132Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-10-24T16:22:44.798698Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322964814, tx_id: 281474976715659 } }] } } 2025-10-24T16:22:44.798705Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-10-24T16:22:45.137136Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:45.213630Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322964814, tx_id: 281474976715659 } } } 2025-10-24T16:22:45.213648Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-10-24T16:22:45.213654Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[1,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 13425, MsgBus: 25890 2025-10-24T16:22:25.901932Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824448719501174:2256];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:25.902163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:25.907778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f9/r3tmp/tmp8OKcQR/pdisk_1.dat 2025-10-24T16:22:26.141473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:26.141496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:26.148467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:26.170686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 13425, node 1 2025-10-24T16:22:26.209836Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:26.217684Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824448719500954:2081] 1761322945872045 != 1761322945872048 2025-10-24T16:22:26.252017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:26.252032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:26.252033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:26.252073Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25890 TClient is connected to server localhost:25890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:26.411615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:26.422881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:26.428311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:22:26.649961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824453014468925:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.649990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.650148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824453014468935:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.650154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:26.734170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:26.743747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:26.743795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:26.743834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:26.743896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:26.743935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:26.743969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:26.743990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:26.744012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:26.744033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:26.744053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:26.744077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:26.744106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:26.744125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824453014468988:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:26.744640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:22:26.744658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:22:26.744671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:22:26.744677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:22:26.744706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:22:26.744711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:22:26.744723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:22:26.744735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:22:26.744743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=nor ... 
057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:43.664807Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:22:43.678632Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:43.679092Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824527964293322:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.679204Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.679336Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824527964293334:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.679343Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.690240Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:22:43.720185Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824527964293355:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.720209Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.720370Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824527964293360:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.720390Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824527964293361:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.720469Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:43.721469Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:43.724518Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-10-24T16:22:43.724598Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824527964293364:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:22:43.787124Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824527964293415:2461] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:43.818494Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:22:43.872837Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:22:43.929825Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:22:44.111744Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:22:44.608220Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:44.608238Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:44.608245Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:44.608490Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322963659;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:22:44.608507Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322963673;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:22:44.608513Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322963708;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:22:44.608640Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824527964293184:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322963736;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761322963736 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0,BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0.5,CATEGORY_BLOOM_FILTER] |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0.5,BLOOM_FILTER] |52.8%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TargetDiscoverer::SystemObjects [GOOD] |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |52.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::DropTable [GOOD] Test command err: Trying to start YDB, gRPC: 23139, MsgBus: 15141 2025-10-24T16:22:21.368334Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824430512183386:2246];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:21.368393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:21.389243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012ff/r3tmp/tmpTrJwfd/pdisk_1.dat 2025-10-24T16:22:21.768189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:21.792263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:21.792282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:21.800256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:21.894469Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:21.895895Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824430512183177:2081] 1761322941333424 != 1761322941333427 TServer::EnableGrpc on GrpcPort 23139, node 1 2025-10-24T16:22:21.996268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:21.996279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:21.996281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:21.996316Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:22.046940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15141 TClient is connected to server localhost:15141 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:22.255455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:22.280114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:22.302059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-10-24T16:22:22.400127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:22.400466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:22.400508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:22.400526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:22.400541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:22.400689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:22.401352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:22.401381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:22.401398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:22.401415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:22.401433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:22.401454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:22.401473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434807151179:2302];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:22.425003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:22.425157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:22.425221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:22.425410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:22.425426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:22.425442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:22.425458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:22.425474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:22.425636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:22.425654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:22.425672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:22.425688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434807151178:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:22.425704Z node 1 :TX_COL ... 
oreAppearanceSnapshot; 2025-10-24T16:22:36.867393Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:22:36.867397Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:22:36.870386Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.870447Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.870494Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.870537Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:36.872642Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:747) 2025-10-24T16:22:36.873421Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824496204797481:2302];ev=NActors::IEventHandle;tablet_id=72075186224037890;tx_id=281474976710659;this=54045631145872;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=1761322956919;max=18446744073709551615;plan=0;src=[7:7564824496204797155:2148];cookie=32:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.873533Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824496204797476:2301];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710659;this=54045638497168;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=1761322956919;max=18446744073709551615;plan=0;src=[7:7564824496204797155:2148];cookie=12:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.873616Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824496204797482:2303];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710659;this=54045638645712;method=TTxController::StartProposeOnExecute;tx_info=281474976710659:TX_KIND_SCHEMA;min=1761322956919;max=18446744073709551615;plan=0;src=[7:7564824496204797155:2148];cookie=22:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.888506Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.888524Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.890114Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.890118Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.891328Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-10-24T16:22:36.891331Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=3;result=not_found; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=22264;columns=6; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=22264;columns=6; 2025-10-24T16:22:37.123424Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824500499765025:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.123451Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.123636Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824500499765053:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.123656Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824500499765051:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.123665Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.128072Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564824500499765057:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.128109Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.128852Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:37.131903Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564824500499765056:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-10-24T16:22:37.200746Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564824500499765109:2493] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.206495Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp:438) WAIT_CLEANING: 0 2025-10-24T16:22:37.712350Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:37.976042Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__table_stats.cpp:353: failed add stats for table with pathId=[OwnerId: 72057594046644480, LocalPathId: 3] 2025-10-24T16:22:37.976073Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__table_stats.cpp:353: failed add stats for table with pathId=[OwnerId: 72057594046644480, LocalPathId: 3] 2025-10-24T16:22:37.976083Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__table_stats.cpp:353: failed add stats for table with pathId=[OwnerId: 72057594046644480, LocalPathId: 3] WAIT_CLEANING: 0 WAIT_CLEANING: 0 WAIT_CLEANING: 3 WAIT_CLEANING: 9 2025-10-24T16:22:41.718437Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7564824496204796849:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:41.718472Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_CLEANING: 9 WAIT_CLEANING: 9 WAIT_CLEANING: 9 WAIT_CLEANING: 9 2025-10-24T16:22:46.442116Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824496204797476:2301];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710663;scan_id=5;gen=1;table=/Root/olapStore/.sys/store_primary_index_portion_stats;snapshot={1761322966000:max};tablet=72075186224037888;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:46.448323Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824496204797482:2303];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710663;scan_id=5;gen=1;table=/Root/olapStore/.sys/store_primary_index_portion_stats;snapshot={1761322966000:max};tablet=72075186224037889;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:46.450315Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[7:7564824496204797481:2302];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710663;scan_id=5;gen=1;table=/Root/olapStore/.sys/store_primary_index_portion_stats;snapshot={1761322966000:max};tablet=72075186224037890;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:22:46.451071Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[7:7564824496204797521:2304];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710663;scan_id=5;gen=1;table=/Root/olapStore/.sys/store_primary_index_portion_stats;snapshot={1761322966000:max};tablet=72075186224037891;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |52.9%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapJson::CompactionVariants[10,true,1024,0,0,0.5] [GOOD] |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-10-24T16:22:45.644334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:45.644726Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824533225101928:2255];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:45.644769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000959/r3tmp/tmps9lRkQ/pdisk_1.dat 2025-10-24T16:22:45.688599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:45.688624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:45.696327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:45.712813Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824533225101708:2081] 1761322965632253 != 1761322965632256 2025-10-24T16:22:45.718631Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:9650 TServer::EnableGrpc on GrpcPort 19990, node 
1 2025-10-24T16:22:45.807340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:46.054369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:46.054385Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:46.054388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:46.054441Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9650 2025-10-24T16:22:46.740223Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:46.800926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:47.058380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:22:47.280870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-10-24T16:22:47.289678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:47.342008Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1761322966865, tx_id: 1 } } } 2025-10-24T16:22:47.342025Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-10-24T16:22:47.344986Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322967187, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1761322967327, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-10-24T16:22:47.344999Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-10-24T16:22:48.203951Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1761322967187, tx_id: 281474976710658 } } } 2025-10-24T16:22:48.204002Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-10-24T16:22:48.204007Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> TableCreator::CreateTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[10,true,1024,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 1170, MsgBus: 2581 2025-10-24T16:22:27.578716Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824457654114873:2181];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:27.578731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f1/r3tmp/tmp9mYOb0/pdisk_1.dat 2025-10-24T16:22:27.671941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:27.684702Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:27.684754Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824457654114729:2081] 
1761322947572940 != 1761322947572943 2025-10-24T16:22:27.687930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:27.687956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1170, node 1 2025-10-24T16:22:27.692130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:27.704037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:27.704047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:27.704049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:27.704096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2581 TClient is connected to server localhost:2581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:27.788266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:27.791061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:22:27.839932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:28.584067Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:29.122390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824461949082709:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.122428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.127935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824466244050015:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.127964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:29.749163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:30.081719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:30.082415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:30.082484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:30.083133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:30.083154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:30.083175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:30.083194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:30.083211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:30.083228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:30.083246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:30.083631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:30.083648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:30.083713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564824466244050133:2326];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:30.084136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:30.084175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:30.084207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:30.084228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:30.084248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:30.084270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:30.084289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824466244050137:2330];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:30.084310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_i ... 
t=useless_schema_removed;address=0,plan_step=1761322967166;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:48.070084Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824537862136427:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967166;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:48.070085Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824537862136536:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967194;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:48.070087Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824537862136427:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967194;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:48.070117Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[7:7564824537862136536:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322967215;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322967215 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:48.070122Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[7:7564824537862136427:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322967215;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322967215 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 
LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:48.070239Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:48.070242Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:48.070244Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:48.070294Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967124;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:48.070299Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967166;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:48.070302Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967194;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:48.070310Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:22:48.070312Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:22:48.070315Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:22:48.070334Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[7:7564824537862136426:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322967215;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322967215 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: 
"JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:22:48.070386Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967124;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:22:48.070391Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967166;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:22:48.070394Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322967194;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:22:48.070426Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[7:7564824537862136428:2329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322967215;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761322967215 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |52.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |52.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |52.9%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |52.9%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |52.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError [GOOD] >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TDataShardTrace::TestTraceDistributedUpsert+UseSink |52.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:22:45.507181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:45.507206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:45.507212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:45.507219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:45.507226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:45.507230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:45.507239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:45.507253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:45.507368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:45.507434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:45.521991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:22:45.522019Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:45.525325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:45.525364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2025-10-24T16:22:45.525422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:45.526860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:45.527031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:45.527140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:45.527213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:45.528034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:45.528079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:45.528295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:45.528306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:45.528340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:45.528347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:45.528354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:45.528399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.529719Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:22:45.551060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:45.551161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.551231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:45.551239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:45.551310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:45.551327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:45.558224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:45.558298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:45.558374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.558386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:45.558392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:45.558399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:45.559009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.559024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:45.559042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:45.559493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.559510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.559528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:45.559536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:45.560285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:45.560768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:45.560827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:45.561068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:45.561099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:45.561107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:45.561178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:45.561186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:45.561217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:45.561232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:45.561699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:45.561709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
nResult Origin: 72075186233409546 TxId: 101 2025-10-24T16:22:49.056857Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5273: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2025-10-24T16:22:49.056871Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6566: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-10-24T16:22:49.056883Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-10-24T16:22:49.056915Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-10-24T16:22:49.056947Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-10-24T16:22:49.057544Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.057555Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:49.057562Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2025-10-24T16:22:49.057605Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [6:130:2155], Recipient [6:130:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-10-24T16:22:49.057612Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-10-24T16:22:49.057622Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.057630Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:22:49.057650Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:22:49.057656Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:22:49.057662Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:22:49.057667Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:22:49.057671Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:22:49.057677Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-10-24T16:22:49.057692Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:346:2323] message: TxId: 101 2025-10-24T16:22:49.057702Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:22:49.057709Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:22:49.057714Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:22:49.057749Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:22:49.058251Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:49.058276Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [6:346:2323] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2025-10-24T16:22:49.058323Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:22:49.058331Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:347:2324] 2025-10-24T16:22:49.058383Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [6:349:2326], Recipient [6:130:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:22:49.058390Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:22:49.058396Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-10-24T16:22:49.058550Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122432, Sender [6:392:2361], Recipient [6:130:2155]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2025-10-24T16:22:49.058557Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5241: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-10-24T16:22:49.059498Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:49.059561Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:357: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2025-10-24T16:22:49.059583Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2025-10-24T16:22:49.059646Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:22:49.060111Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 
72057594046678944 2025-10-24T16:22:49.060168Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-10-24T16:22:49.060175Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:22:49.060248Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:22:49.060257Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:22:49.060322Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [6:398:2367], Recipient [6:130:2155]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:22:49.060332Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:22:49.060337Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046678944 2025-10-24T16:22:49.060360Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124996, Sender [6:346:2323], Recipient [6:130:2155]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-10-24T16:22:49.060366Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5243: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-10-24T16:22:49.060378Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:22:49.060400Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:22:49.060405Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:396:2365] 2025-10-24T16:22:49.060429Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [6:398:2367], Recipient [6:130:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:22:49.060434Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:22:49.060438Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-10-24T16:22:49.060496Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [6:399:2368], Recipient [6:130:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:22:49.060503Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:22:49.060516Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:49.060549Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 31us result status StatusPathDoesNotExist 2025-10-24T16:22:49.060587Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TTransferTests::Alter [GOOD] >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardTrace::TestTraceDistributedSelect >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TDataShardTrace::TestTraceDistributedUpsert-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckMultipleAlterWithStorageLimitsError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:22:49.640885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:49.640908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:49.640915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:49.640922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:49.640928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:49.640933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:49.640943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:49.640956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:49.641060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:49.641118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:49.655928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:22:49.655949Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:49.659245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:49.659276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:49.659334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:49.661810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:49.661990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:49.662080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:49.662148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:49.663050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:49.663092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:49.663329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:49.663339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:49.663363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:49.663371Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:49.663377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:49.663404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.664763Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:22:49.688223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:49.688290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.688345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:49.688353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:49.688415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:49.688431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:49.689157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:49.689200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:49.689254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.689271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:49.689277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:49.689282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:49.689681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.689691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:49.689697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:49.690003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.690012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.690017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:49.690024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:49.690723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:49.692583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:49.692623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:49.692832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:49.692860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:49.692869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:49.692928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:49.692935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:49.692961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:49.692973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:49.693443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:49.693452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... oard.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-10-24T16:22:49.716901Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: FileStore ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" IOPS: 0 Throughput: 0 Size: 0 } TabletID: 72075186233409546 AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-10-24T16:22:49.716939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6287: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2025-10-24T16:22:49.716944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 106, shardIdx: 72057594046678944:1, partId: 0 2025-10-24T16:22:49.716959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2025-10-24T16:22:49.716968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 106:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-10-24T16:22:49.716976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 106:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409546 Origin: 72057594037968897 2025-10-24T16:22:49.716993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 106:0 2 -> 3 2025-10-24T16:22:49.720113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.720166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.720174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:89: TAlterFileStore::TConfigureParts operationId# 106:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:49.720613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 
2025-10-24T16:22:49.720642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-10-24T16:22:49.720673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-10-24T16:22:49.720697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: TxId: 106 Origin: 72075186233409546 Status: OK 2025-10-24T16:22:49.720702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:43: TAlterFileStore::TConfigureParts operationId# 106:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2025-10-24T16:22:49.720707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 106:0 3 -> 128 2025-10-24T16:22:49.721085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.721113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.721118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:197: TAlterFileStore::TPropose operationId# 106:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:49.721126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-10-24T16:22:49.721168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:49.721538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-10-24T16:22:49.721564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000005 2025-10-24T16:22:49.721631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:49.721650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:49.721658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:153: TAlterFileStore::TPropose operationId# 106:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-10-24T16:22:49.721684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation 
is done id#106:0 progress is 1/1 2025-10-24T16:22:49.721689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-10-24T16:22:49.721694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#106:0 progress is 1/1 2025-10-24T16:22:49.721698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-10-24T16:22:49.721710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:22:49.721721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-10-24T16:22:49.721727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-10-24T16:22:49.721732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 106:0 2025-10-24T16:22:49.721736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 106:0 2025-10-24T16:22:49.721757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:22:49.721764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-10-24T16:22:49.721769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:22:49.722412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:49.722423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:22:49.722459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:49.722465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 106, path id: 2 FAKE_COORDINATOR: Erasing txId 106 2025-10-24T16:22:49.722584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-10-24T16:22:49.722613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-10-24T16:22:49.722618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-10-24T16:22:49.722623Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:22:49.722629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:22:49.722645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-10-24T16:22:49.723094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-10-24T16:22:49.723174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-10-24T16:22:49.723182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-10-24T16:22:49.723258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-10-24T16:22:49.723277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:22:49.723283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:435:2413] TestWaitNotification: OK eventTxId 106 >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:22:45.823012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:45.823041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:45.823047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:45.823055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:45.823062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:45.823067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:45.823077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 
0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:45.823092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:45.823198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:45.823257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:45.850440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:22:45.850465Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:45.857979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:45.858020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:45.858072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:45.859567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:45.859732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:45.859832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:45.859924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:45.860832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:45.860882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:45.861127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:45.861139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:45.861172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:45.861181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:45.861188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:45.861221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.862772Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 
2025-10-24T16:22:45.888762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:45.888844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.888906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:45.888915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:45.888982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:45.888999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:45.889775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:45.889828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:45.889893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.889904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:45.889910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:45.889916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:45.890393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.890408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:45.890414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:45.890773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.890785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:45.890791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:45.890798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:45.891489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:45.891907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:45.891956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:45.892154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:45.892182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:45.892190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:45.892252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:45.892261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:45.892288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:45.892301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:45.892722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:45.892732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ivate::TEvProgressOperation 2025-10-24T16:22:49.640809Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.640815Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2025-10-24T16:22:49.640819Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:22:49.640824Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-10-24T16:22:49.640847Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:49.641161Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:561:2504] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2025-10-24T16:22:49.641202Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-10-24T16:22:49.641209Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2025-10-24T16:22:49.641227Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-10-24T16:22:49.641236Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-10-24T16:22:49.641247Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2025-10-24T16:22:49.641336Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:49.641343Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-10-24T16:22:49.641358Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-10-24T16:22:49.641410Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269287424, Sender [6:139:2161], Recipient [6:263:2253] 2025-10-24T16:22:49.641415Z node 6 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5250: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:22:49.641424Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:49.641439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805937 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:49.641445Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-10-24T16:22:49.641462Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-10-24T16:22:49.641481Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:22:49.641490Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:22:49.641498Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:704: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2025-10-24T16:22:49.641956Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2025-10-24T16:22:49.642016Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:49.642022Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000005 first txId#104 countTxs#1 2025-10-24T16:22:49.642028Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000005 2025-10-24T16:22:49.642033Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2025-10-24T16:22:49.642058Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [6:128:2153], Recipient [6:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-10-24T16:22:49.642065Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 104 2025-10-24T16:22:49.642078Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:49.642082Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:22:49.642112Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:49.642116Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 
3 2025-10-24T16:22:49.642161Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:22:49.642167Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-10-24T16:22:49.642176Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:22:49.642180Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:22:49.642184Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:22:49.642189Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:22:49.642192Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:22:49.642197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-10-24T16:22:49.642202Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:22:49.642206Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:22:49.642209Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:22:49.642227Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:22:49.642231Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-10-24T16:22:49.642235Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-10-24T16:22:49.642353Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [6:212:2213], Recipient [6:128:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 4 } 2025-10-24T16:22:49.642358Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-10-24T16:22:49.642369Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:22:49.642399Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:22:49.642404Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:22:49.642408Z node 6 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-10-24T16:22:49.642412Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:22:49.642421Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-10-24T16:22:49.642425Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:22:49.642824Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:49.642904Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:22:49.642908Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 |53.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |53.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |53.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TableCreator::CreateTables [GOOD] >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop >> TAccessServiceTest::PassRequestId |53.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b {0, 1} | 0 39 2050b {5, 7} + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : 
xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{10} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | 
ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 
31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{21} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 20 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERo ... 
owOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 
122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{29} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 
3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 
32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0.5,CATEGORY_BLOOM_FILTER] [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0.5,BLOOM_FILTER] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-10-24T16:22:49.664124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824550073588194:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:49.664154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001792/r3tmp/tmpyCtT6h/pdisk_1.dat 2025-10-24T16:22:49.735315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-10-24T16:22:49.744676Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:10095 TServer::EnableGrpc on GrpcPort 12045, node 1 2025-10-24T16:22:49.777523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:49.777552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:49.777555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:49.777599Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-10-24T16:22:49.797158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:49.797187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:49.797862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:49.798181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:49.800953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:49.802323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:49.802589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:49.917074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions |53.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |53.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |53.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |53.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> TAccessServiceTest::Authenticate >> FolderServiceTest::TFolderServiceTransitional >> TAccessServiceTest::PassRequestId [GOOD] >> TUserAccountServiceTest::Get |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> FolderServiceTest::TFolderService |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::SimultaneousCreateDropNfs >> FolderServiceTest::TFolderServiceAdapter |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |53.1%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0.5,BLOOM_FILTER] [GOOD] >> TFileStoreWithReboots::CheckFileStoreHDDLimits >> TServiceAccountServiceTest::IssueToken [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000eb7/r3tmp/tmpeM0Gxm/pdisk_1.dat 2025-10-24T16:22:50.910811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:50.910845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:50.912158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:50.924211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:50.924277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:50.924666Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.927989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824556732578069:2081] 1761322970824790 != 1761322970824793 TClient is connected to server localhost:65250 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:50.973974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:50.977687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:50.992343Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [10fabfab7360]{trololo} Connect to grpc://localhost:8581 2025-10-24T16:22:50.992849Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [10fabfab7360]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-10-24T16:22:51.000251Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [10fabfab7360]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TFileStoreWithReboots::CheckFileStoreSSDLimits >> TFileStoreWithReboots::AlterAssignDrop >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0.5,BLOOM_FILTER] [GOOD] |53.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BloomMixIndexesVariants[false,true,1024,0,0,0.5,BLOOM_FILTER] [GOOD] Test command err: Trying to start YDB, gRPC: 5347, MsgBus: 11561 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001300/r3tmp/tmp6IIp4E/pdisk_1.dat 2025-10-24T16:22:19.705324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:19.705344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:19.713679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5347, node 1 2025-10-24T16:22:19.788171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:19.788247Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:19.789990Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:19.791258Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824421753333788:2081] 1761322939617521 != 1761322939617524 2025-10-24T16:22:19.904178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:19.904188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:19.904190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:19.904226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:20.084152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11561 TClient is connected to server localhost:11561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:20.340911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:20.636019Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:21.956775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824430343269065:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:21.956832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:21.957090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824430343269076:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:21.957101Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:21.957177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824430343269078:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:21.957183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:22.348282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:22.535542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:22.535788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:22.539907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:22.539975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:22.539994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:22.540014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:22.540035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:22.540063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:22.540085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:22.540104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:22.540124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:22.540140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:22.540159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824434638236427:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:22.543017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:22.543221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:22.543266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:22.543282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:22.543296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:22.543312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:22.543329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824434638236448:2327];tablet_id=72075186224037888;process=TTxInit ... 
cyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") = "1b4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "%1b4%" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "%1b4%" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "%1B4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") ilike "1b5" ORDER BY Col1; 2025-10-24T16:22:51.011436Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824548080007338:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:51.011469Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824548080007338:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=8;to=9; 2025-10-24T16:22:51.011618Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824548080007338:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322970617;tx_id=281474976715676;;is_diff=1;version=8; 2025-10-24T16:22:51.011812Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824548080007338:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322970722;tx_id=281474976715678;;new_schema=Id: 0 SinceStep: 1761322970722 SinceTxId: 281474976715678 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 7 Version: 9 Indexes { Id: 4 Name: "index_ngramm_b" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" 
SubColumn { SubColumnName: "\"b.c.d\"" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: false } } Indexes { Id: 5 Name: "index_ngramm_a" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "a" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: true } } Indexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 9 DefaultCompression { } UpsertIndexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:51.012107Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824548080007338:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:51.012277Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824548080007320:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:51.012296Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824548080007320:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=8;to=9; 2025-10-24T16:22:51.012378Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824548080007320:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322970617;tx_id=281474976715676;;is_diff=1;version=8; 2025-10-24T16:22:51.012482Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824548080007320:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322970722;tx_id=281474976715678;;new_schema=Id: 0 SinceStep: 1761322970722 SinceTxId: 281474976715678 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 7 Version: 9 Indexes { 
Id: 4 Name: "index_ngramm_b" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "\"b.c.d\"" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: false } } Indexes { Id: 5 Name: "index_ngramm_a" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "BLOOM_NGRAMM_FILTER" BloomNGrammFilter { NGrammSize: 3 FilterSizeBytes: 4096 HashesCount: 2 ColumnId: 2 RecordsCount: 1024 DataExtractor { ClassName: "SUB_COLUMN" SubColumn { SubColumnName: "a" } } BitsStorage { ClassName: "SIMPLE_STRING" } CaseSensitive: true } } Indexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 9 DefaultCompression { } UpsertIndexes { Id: 6 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:51.012696Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824548080007320:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a4" ORDER BY Col1; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a\"") = "a4" ORDER BY Col1; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d111\"") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "1b3" ORDER BY Col1; COMPARE: [[13u;["{\"b.c.d\":\"1b3\"}"]]] OUTPUT: [[13u;["{\"b.c.d\":\"1b3\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") like "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 >> TServiceAccountServiceTest::Get [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> 
TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> TAccessServiceTest::Authenticate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: 2025-10-24T16:22:50.285860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:50.326095Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:50.326193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:50.326204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0017de/r3tmp/tmpkwuu8G/pdisk_1.dat 2025-10-24T16:22:50.422030Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.423018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:50.423052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:50.423181Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322969899090 != 1761322969899093 2025-10-24T16:22:50.460656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:50.517356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:50.552876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:50.650238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:50.952299Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:52.738322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2765], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:52.738359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:943:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:52.738375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:52.738806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:949:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:52.738821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:52.749872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:52.765649Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-10-24T16:22:52.926688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:947:2773], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:53.011977Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1011:2817] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:53.502432Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfdj1fxcvkrvj672sgxg3, Database: , SessionId: ydb://session/3?node_id=1&id=MzY3YmY3NDAtODk5MWUyZDMtM2U0ZWQ5NmMtZmEwNTU4ZmE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (WaitTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000e85/r3tmp/tmpllq3gz/pdisk_1.dat 2025-10-24T16:22:51.004372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:51.004425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:51.017594Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:51.017937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824558134584680:2081] 1761322970947916 != 1761322970947919 TClient is connected to server localhost:24932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:51.071030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:51.071076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:51.071723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:51.072376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000e85/r3tmp/tmpIWtqkv/pdisk_1.dat 2025-10-24T16:22:52.968721Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:52.968779Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:53.017906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:53.017938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:53.026798Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:53.027390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:53.116774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:53.124295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:53.184775Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> TTabletPipeTest::TestSendWithoutWaitOpen ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-10-24T16:22:51.064225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:51.144785Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:51.144880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:51.144890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0017a6/r3tmp/tmpmed2dk/pdisk_1.dat 2025-10-24T16:22:51.229961Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:51.230647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:51.230683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:51.230820Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322970483281 != 1761322970483284 2025-10-24T16:22:51.264851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:51.318614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:51.361398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:51.468268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:51.940145Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TUserAccountServiceTest::Get [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BloomCategoryIndexesVariants[false,true,1,10,1000,0.5,BLOOM_FILTER] [GOOD] Test command err: Trying to start YDB, gRPC: 17759, MsgBus: 62306 2025-10-24T16:22:27.846611Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824457775536659:2246];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:27.846627Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:27.865478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0012f2/r3tmp/tmpdj0kiJ/pdisk_1.dat 2025-10-24T16:22:28.402972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:28.402994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:28.403595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:28.460966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:28.471401Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824457775536450:2081] 1761322947841098 != 1761322947841101 2025-10-24T16:22:28.485587Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17759, node 1 2025-10-24T16:22:28.648532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:28.648543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:28.648545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:28.648581Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:28.745080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:28.844087Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:62306 TClient is connected to server localhost:62306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:22:29.335209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:29.355081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:22:30.478343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824470660439026:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.478383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.478515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824470660439036:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.478520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:30.900267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:22:31.009012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:31.010955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:31.011246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:31.011266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:31.011712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:31.015255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:22:31.015271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:22:31.015304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:22:31.015319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:22:31.015336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:22:31.015564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:31.015583Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:22:31.015604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:22:31.015620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:22:31.015636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:22:31.015653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:22:31.015672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:22:31.015698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564824470660439096:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:22:31.016121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:22:31.016142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564824470660439110:2327];tablet_id=7207518622 ... 
511f0-93df3bb4-dd105cd;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a1" ORDER BY Col1; COMPARE: [[1u;["{\"a.b.c\":\"a1\"}"]]] OUTPUT: [[1u;["{\"a.b.c\":\"a1\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=DROP_INDEX, NAME=a_index) 2025-10-24T16:22:53.188354Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:53.193379Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715672;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=5;to_version=6;diff=Version: 6 DefaultCompression { } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:53.193728Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715672;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=5;to_version=6;diff=Version: 6 DefaultCompression { } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "1a1" ORDER BY Col1; COMPARE: [[11u;["{\"a.b.c\":\"1a1\"}"]]] OUTPUT: [[11u;["{\"a.b.c\":\"1a1\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_INDEX, NAME=b_index, TYPE=CATEGORY_BLOOM_FILTER, FEATURES=`{"column_name" : "Col2", "false_positive_probability" : 0.01}`) 2025-10-24T16:22:53.353630Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:22:53.369007Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=6;to_version=7;diff=Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:22:53.369234Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715674;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=6;to_version=7;diff=Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: 
"CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d\"") = "1b4" ORDER BY Col1; COMPARE: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: [[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/4/1 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a4" ORDER BY Col1; 2025-10-24T16:22:53.601496Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824558628049010:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:53.601520Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824558628049010:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=6;to=7; 2025-10-24T16:22:53.601646Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824558628049010:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322973242;tx_id=281474976715672;;is_diff=1;version=6; 2025-10-24T16:22:53.601765Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824558628049010:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322973417;tx_id=281474976715674;;new_schema=Id: 0 SinceStep: 1761322973417 SinceTxId: 281474976715674 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 7 Indexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 
Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:53.601952Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824558628048995:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:53.601963Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824558628048995:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=6;to=7; 2025-10-24T16:22:53.602011Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824558628048995:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761322973242;tx_id=281474976715672;;is_diff=1;version=6; 2025-10-24T16:22:53.602073Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824558628048995:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761322973417;tx_id=281474976715674;;new_schema=Id: 0 SinceStep: 1761322973417 SinceTxId: 281474976715674 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 1000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: true ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 7 Indexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } Diff { Version: 7 DefaultCompression { } UpsertIndexes { Id: 4 Name: "b_index" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "CATEGORY_BLOOM_FILTER" BloomFilter { FalsePositiveProbability: 0.01 ColumnIds: 2 DataExtractor { ClassName: "DEFAULT" } BitsStorage { ClassName: "SIMPLE_STRING" } } } DropIndexes: 3 Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } } ; 2025-10-24T16:22:53.602249Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564824558628049010:2324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:22:53.602284Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564824558628048995:2323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; COMPARE: [[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] OUTPUT: 
[[4u;["{\"a\":\"a4\",\"b.c.d\":\"b4\"}"]];[14u;["{\"a\":\"a4\",\"b.c.d\":\"1b4\"}"]]] INDEX:0/3/2 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"b.c.d111\"") = "1b5" ORDER BY Col1; COMPARE: [] OUTPUT: [] INDEX:0/5/0 HEADER:0/0/0 >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] >> KqpConstraints::AddNonColumnDoesnotReturnInternalError [GOOD] >> KqpConstraints::AlterTableAddColumnWithDefaultCancellation >> FolderServiceTest::TFolderServiceTransitional [GOOD] |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-10-24T16:22:50.777561Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:50.842508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:50.842590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:50.842600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0017b7/r3tmp/tmpyeOQwQ/pdisk_1.dat 2025-10-24T16:22:50.964673Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.965647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:50.965677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:50.965786Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322970254099 != 1761322970254102 2025-10-24T16:22:50.998798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:51.048556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:51.096789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:51.183430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:51.537455Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:53.819679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2765], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.819731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:943:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.819748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.819980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:949:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.819995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.827059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:53.897292Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-10-24T16:22:54.066106Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:947:2773], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:54.145416Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1011:2817] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:54.514304Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfektdhh7k2jgdr1zpcwm, Database: , SessionId: ydb://session/3?node_id=1&id=YmIxYTgyZDgtZjUzZDY4NjAtYjMyZGI1MDQtZDcxY2E1NmY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-10-24T16:22:50.506922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:50.579028Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:50.579115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:50.579124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0017c7/r3tmp/tmpjEz626/pdisk_1.dat 2025-10-24T16:22:50.749080Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.750063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:50.750103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:50.750241Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322970092732 != 1761322970092735 2025-10-24T16:22:50.784413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:50.838789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:50.888472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:50.967269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:51.240315Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:53.136664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2765], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.136714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:943:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.136740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.137448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:949:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.137694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.150382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:53.184963Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-10-24T16:22:53.347045Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:947:2773], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:53.427640Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1011:2817] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:54.282185Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfdy834m46zsdy1btvdjs, Database: , SessionId: ydb://session/3?node_id=1&id=ZGY1NzMxNTItMTE0MTI4N2ItYjYyNDkzYmYtZDY3NWUyNw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:22:54.331525Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgff3617jmryq0sky4hx55, Database: , SessionId: ydb://session/3?node_id=1&id=YzlkZmM5Y2UtYWJjMWVlZWEtMTk4MjgzODMtOWM2NjgwOGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:22:54.400174Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgff495qye8knfgq86c3jw, Database: , SessionId: ydb://session/3?node_id=1&id=MTcxNTBjZTktNmVjNDkzYjgtZmRiYmIwZmYtODFlYTg2OTU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-10-24T16:22:51.704548Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824561238195793:2152];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:51.704841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000e2c/r3tmp/tmpCNLOwG/pdisk_1.dat 2025-10-24T16:22:52.077058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:52.077083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:52.084672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:52.112903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:52.124659Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:52.124882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824561238195675:2081] 1761322971652791 != 1761322971652794 2025-10-24T16:22:52.164079Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:397} StateFunc too long Type# 268639257 Duration# 0.019560s 2025-10-24T16:22:52.287889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TClient is connected to server localhost:19599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:52.519388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:52.542417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:52.583822Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [50ee3faba220] Connect to grpc://localhost:31139 2025-10-24T16:22:52.586637Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [50ee3faba220] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-10-24T16:22:52.609608Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [50ee3faba220] Status 7 Permission Denied 2025-10-24T16:22:52.612009Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [50ee3faba220] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-10-24T16:22:52.620757Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [50ee3faba220] Response AuthenticateResponse { subject { user_account { id: "1234" } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000ece/r3tmp/tmpohbbSb/pdisk_1.dat 2025-10-24T16:22:52.767811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:52.767846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:52.769883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:52.834399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:52.834455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: 
LookupError; 2025-10-24T16:22:52.835530Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:52.839978Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824565274969883:2081] 1761322972432139 != 1761322972432142 2025-10-24T16:22:53.081251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:22:53.439965Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:53.521873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:53.544546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> FolderServiceTest::TFolderService [GOOD] >> SystemView::QueryStatsAllTables [GOOD] >> SystemView::QueryMetricsSimple >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:22:54.182527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:54.182549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.182555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:54.182562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:54.182567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:54.182571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:54.182579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.182590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:54.182905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:54.183658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:54.312350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:22:54.312369Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:54.336340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:54.336377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:54.337447Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:54.341961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:54.342579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:54.342905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.343238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:54.349623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.349670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:54.350147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:54.350156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.350179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:54.350186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:54.350192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:54.350213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.353429Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:22:54.441313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:54.441380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.441440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:54.441448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:54.443225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:54.443253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:54.444343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.444397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:54.444444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.444463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:54.444469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:54.444474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:54.445062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.445074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:54.445080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:54.445842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.445852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.445859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.445866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:54.448979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:54.450813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:54.451422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:54.451647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.451673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:54.451680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.451755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:54.451762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.451794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:54.451820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:54.452841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:54.452849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
0Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:90: TCreateFileStore::TConfigureParts operationId# 110:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:55.128347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2025-10-24T16:22:55.128393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2025-10-24T16:22:55.129479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2025-10-24T16:22:55.129516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 Status: OK 2025-10-24T16:22:55.129525Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:44: TCreateFileStore::TConfigureParts operationId# 110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2025-10-24T16:22:55.129534Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 110:0 3 -> 128 2025-10-24T16:22:55.130797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:22:55.130853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:22:55.130860Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:189: TCreateFileStore::TPropose operationId# 110:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:55.130868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2025-10-24T16:22:55.130900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:55.131196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2025-10-24T16:22:55.131218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2025-10-24T16:22:55.131283Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:55.131303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 
} } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:55.131309Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:155: TCreateFileStore::TPropose operationId# 110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2025-10-24T16:22:55.131329Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 110:0 128 -> 240 2025-10-24T16:22:55.131358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:22:55.131373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2025-10-24T16:22:55.136159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:55.136168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:55.136220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:22:55.136239Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:55.136244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 110, path id: 1 2025-10-24T16:22:55.136250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 110, path id: 5 2025-10-24T16:22:55.136324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:22:55.136331Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 110:0 ProgressState 2025-10-24T16:22:55.136344Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 1/1 2025-10-24T16:22:55.136349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:22:55.136354Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 1/1 2025-10-24T16:22:55.136357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:22:55.136360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2025-10-24T16:22:55.136367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:22:55.136372Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all 
the parts is done, operation id: 110:0 2025-10-24T16:22:55.136376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 110:0 2025-10-24T16:22:55.136407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:22:55.136414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2025-10-24T16:22:55.136417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-10-24T16:22:55.136421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-10-24T16:22:55.136538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:55.136555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:55.136559Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2025-10-24T16:22:55.136564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-10-24T16:22:55.136568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:22:55.136650Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:55.136660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:55.136664Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2025-10-24T16:22:55.136667Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-10-24T16:22:55.136671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:22:55.136679Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 110, subscribers: 0 2025-10-24T16:22:55.137300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-10-24T16:22:55.137322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2025-10-24T16:22:55.137396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2025-10-24T16:22:55.137403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2025-10-24T16:22:55.137475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2025-10-24T16:22:55.137492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-10-24T16:22:55.137496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:675:2623] TestWaitNotification: OK eventTxId 110 >> TTabletPipeTest::TestSendAfterOpen >> TDataShardLocksTest::UseLocksCache [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-10-24T16:22:52.279504Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824564473790126:2249];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:52.279528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f07/r3tmp/tmpXV9C78/pdisk_1.dat 2025-10-24T16:22:52.631283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:52.640201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:52.640448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:52.646436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:52.796757Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:52.799943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824564473789915:2081] 1761322972261439 != 1761322972261442 2025-10-24T16:22:52.875888Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:397} StateFunc too long Type# 268639257 Duration# 0.016058s 2025-10-24T16:22:52.897469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:53.281978Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:53.485620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:53.521257Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [1172ffabaa60] Connect to grpc://localhost:2630 2025-10-24T16:22:53.590077Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [1172ffabaa60] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-10-24T16:22:53.597096Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [1172ffabaa60] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2630: Failed to connect to remote host: Connection refused 2025-10-24T16:22:53.611973Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [1172ffabaa60] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-10-24T16:22:53.613278Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [1172ffabaa60] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2630: Failed to connect to remote host: Connection refused 2025-10-24T16:22:54.615734Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [1172ffabaa60] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-10-24T16:22:54.623560Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [1172ffabaa60] Status 5 Not Found 2025-10-24T16:22:54.624024Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [1172ffabaa60] Request ListFoldersRequest { id: "i_am_exists" } 2025-10-24T16:22:54.637562Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [1172ffabaa60] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-10-24T16:22:50.716525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:50.760748Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: 
[1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:50.760839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:50.760850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0017ae/r3tmp/tmpyZ8jHl/pdisk_1.dat 2025-10-24T16:22:50.885985Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.887219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:50.887253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:50.887371Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322970235815 != 1761322970235818 2025-10-24T16:22:50.918824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:50.976304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:51.015042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:51.106361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:51.408297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:53.302994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2765], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.303023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:943:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.303035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.303452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:949:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.303461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:53.305896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:53.329614Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-10-24T16:22:53.496195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:947:2773], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:22:53.660010Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:1011:2817] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:54.221132Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfe3p1sshcm8m54269cr5, Database: , SessionId: ydb://session/3?node_id=1&id=MzI4YTUyMmMtOTVkZmNkZWMtMWIwZThlNTUtYjRjYTliMTc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:22:54.362678Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgff3j63w9qyyz76zmaagc, Database: , SessionId: ydb://session/3?node_id=1&id=ZDE3NjIzZTAtNmZhOThlMmQtNTI2MDlmZWQtYTQ2NzE1MWE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:22:54.746415Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgffbk6nv1ac1wmca0kvdc, Database: , SessionId: ydb://session/3?node_id=1&id=YzcxOWU5ZTktZjkyZGFjM2ItYzBiNzliMjAtNmQ4OGVjYzE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-10-24T16:22:53.536666Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824567085062339:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:53.536891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:53.590902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000825/r3tmp/tmpcPaBHQ/pdisk_1.dat 2025-10-24T16:22:53.982099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:53.982121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:53.992493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:54.051718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:54.087531Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:54.092041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: 
Notification cookie mismatch for subscription [1:7564824567085062227:2081] 1761322973509018 != 1761322973509021 2025-10-24T16:22:54.264489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:54.515227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:54.536336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:54.572153Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [50ccbfab7e60] Connect to grpc://localhost:28157 2025-10-24T16:22:54.572349Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [50ccbfab7e60] Request ListFoldersRequest { id: "i_am_exists" } 2025-10-24T16:22:54.575752Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [50ccbfab7e60] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-10-24T16:22:54.576077Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [50ccbfab8120] Connect to grpc://localhost:29656 2025-10-24T16:22:54.576224Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [50ccbfab8120] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-10-24T16:22:54.580045Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [50ccbfab8120] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-10-24T16:22:54.580261Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [50ccbfab8120] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-10-24T16:22:54.581637Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [50ccbfab8120] Status 5 Not Found 2025-10-24T16:22:54.581871Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [50ccbfab7e60] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-10-24T16:22:54.583519Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [50ccbfab7e60] Status 5 Not Found 2025-10-24T16:22:54.591919Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-10-24T16:22:52.741227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824565087902203:2190];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:52.741248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:22:52.771552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00088f/r3tmp/tmp46IdBx/pdisk_1.dat 2025-10-24T16:22:53.363400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:53.370787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:53.370814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:53.376037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:53.426543Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles 
were not loaded 2025-10-24T16:22:53.441549Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824565087902048:2081] 1761322972708017 != 1761322972708020 2025-10-24T16:22:53.635997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:53.742582Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:54.065416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:54.083655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:54.094079Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [73f1ffaba220] Connect to grpc://localhost:15852 2025-10-24T16:22:54.148659Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [73f1ffaba220] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-10-24T16:22:54.167969Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [73f1ffaba220] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15852: Failed to connect to remote host: Connection refused 2025-10-24T16:22:54.172498Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [73f1ffaba220] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-10-24T16:22:54.172786Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [73f1ffaba220] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15852: Failed to connect to remote host: Connection refused 2025-10-24T16:22:55.179986Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [73f1ffaba220] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-10-24T16:22:55.187332Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [73f1ffaba220] Status 5 Not Found 2025-10-24T16:22:55.187554Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [73f1ffaba220] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-10-24T16:22:55.188312Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [73f1ffaba220] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TResourceBrokerInstant::Test >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::CreateExternalTable >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestRewriteSameNode >> TFileStoreWithReboots::CheckFileStoreSSDLimits [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TExternalTableTest::DropTableTwice >> TResourceBrokerInstant::Test [GOOD] >> TResourceBrokerInstant::TestErrors |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2025-10-24T16:22:50.576662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:50.647641Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:50.647739Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:50.647750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014f1/r3tmp/tmpowKcGc/pdisk_1.dat 2025-10-24T16:22:50.791826Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.792804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:50.792835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:50.792957Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322970143383 != 1761322970143386 2025-10-24T16:22:50.830868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:50.887961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:50.923184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:51.010676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:51.024999Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:22:51.025231Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:668:2562], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:22:51.025355Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:22:51.025408Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:681:2569] 2025-10-24T16:22:51.025490Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:51.038752Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-10-24T16:22:51.038789Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:668:2562], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:22:51.038969Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:683:2571] 2025-10-24T16:22:51.039017Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:51.040525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:668:2562], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:22:51.040701Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:51.040730Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:51.040972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:51.040983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:51.040991Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:51.041051Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:51.041073Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:51.041087Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:712:2569] in generation 1 2025-10-24T16:22:51.041163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:51.041181Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:51.041296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:22:51.041304Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:22:51.041311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:22:51.041341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:51.041356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:51.041365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:713:2571] in generation 1 2025-10-24T16:22:51.052159Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:51.055975Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:51.056066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:51.056093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:716:2590] 2025-10-24T16:22:51.056098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:51.056102Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-10-24T16:22:51.056106Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:51.056204Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:681:2569], Recipient [1:681:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:22:51.056212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:22:51.056237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:51.056246Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:22:51.056257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:51.056266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:717:2591] 2025-10-24T16:22:51.056270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:22:51.056274Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:22:51.056278Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:22:51.056378Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:683:2571], Recipient [1:683:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:22:51.056381Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:22:51.056416Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:51.056437Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:51.056450Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:51.056455Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:51.056461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:22:51.056465Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:22:51.056468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:22:51.056471Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:51.056475Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:51.056479Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:22:51.056487Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 
2025-10-24T16:22:51.056596Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:685:2572], Recipient [1:681:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:22:51.056602Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:22:51.056606Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:674:2565], serverId# [1:685:2572], sessionId# [0:0:0] 2025-10-24T16:22:51.056611Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:22:51.056614Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:51.056618Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 7 ... Id: 01k8bgfgapcfkgck5z6vgn99wv, Database: , SessionId: ydb://session/3?node_id=2&id=ZDJiNzhmZTItYzRlNGU1MzAtMTEzMTMzNjUtZjNkMDljMGI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:22:55.685067Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [2:998:2777], Recipient [2:971:2763]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-10-24T16:22:55.685123Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:22:55.685141Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-10-24T16:22:55.685163Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:22:55.685169Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:22:55.685175Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:22:55.685180Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:22:55.685197Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2025-10-24T16:22:55.685204Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:22:55.685208Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:22:55.685212Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:22:55.685217Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:22:55.685235Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 
Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-10-24T16:22:55.685297Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-10-24T16:22:55.685306Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is DelayComplete 2025-10-24T16:22:55.685311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:22:55.685315Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:22:55.685319Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:22:55.685333Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:22:55.685337Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:22:55.685343Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037888 has finished 2025-10-24T16:22:55.685349Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:22:55.695802Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-10-24T16:22:55.695840Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-10-24T16:22:55.695884Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:22:55.695898Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 1 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-10-24T16:22:55.695932Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:55.695961Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:22:55.695967Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:2] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:22:55.695979Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[2:998:2777], 0} after executionsCount# 1 2025-10-24T16:22:55.695990Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[2:998:2777], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:22:55.696014Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[2:998:2777], 0} finished in read 2025-10-24T16:22:55.696074Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [2:971:2763], Recipient [2:683:2571]: {TEvReadSet step# 2500 txid# 281474976710661 TabletSource# 72075186224037889 
TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-10-24T16:22:55.696083Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:22:55.696091Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710661 2025-10-24T16:22:55.696440Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [2:998:2777], Recipient [2:971:2763]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:22:55.696453Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:22:55.696506Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [2:998:2777], Recipient [2:683:2571]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-10-24T16:22:55.696544Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-10-24T16:22:55.696558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-10-24T16:22:55.696576Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037889 is Executed 2025-10-24T16:22:55.696584Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-10-24T16:22:55.696590Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-10-24T16:22:55.696595Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-10-24T16:22:55.696610Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037889 2025-10-24T16:22:55.696616Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037889 is Executed 2025-10-24T16:22:55.696620Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-10-24T16:22:55.696624Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-10-24T16:22:55.696628Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-10-24T16:22:55.696645Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-10-24T16:22:55.696703Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-10-24T16:22:55.696711Z node 2 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[2:998:2777], 1} after executionsCount# 1 2025-10-24T16:22:55.696717Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[2:998:2777], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:22:55.696727Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[2:998:2777], 1} finished in read 2025-10-24T16:22:55.696735Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037889 is Executed 2025-10-24T16:22:55.696738Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-10-24T16:22:55.696743Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-10-24T16:22:55.696747Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-10-24T16:22:55.696755Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037889 is Executed 2025-10-24T16:22:55.696758Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-10-24T16:22:55.696763Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:6] at 72075186224037889 has finished 2025-10-24T16:22:55.696768Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-10-24T16:22:55.696786Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-10-24T16:22:55.696877Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [2:998:2777], Recipient [2:683:2571]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-10-24T16:22:55.696885Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-10-24T16:22:53.252086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824566954511341:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:53.252103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000e68/r3tmp/tmpPr5mnC/pdisk_1.dat 2025-10-24T16:22:53.924748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:53.935095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:53.935362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-10-24T16:22:53.938269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:54.048904Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:54.051935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824566954511302:2081] 1761322973248223 != 1761322973248226 2025-10-24T16:22:54.116089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:54.275895Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:54.385866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:54.406289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000e68/r3tmp/tmpCqON8I/pdisk_1.dat 2025-10-24T16:22:55.501254Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:55.501292Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:55.515737Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:55.515774Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:55.517084Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:55.517874Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564824577270847902:2081] 1761322975467948 != 1761322975467951 2025-10-24T16:22:55.525952Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:22:55.588395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:55.596353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> KqpWorkload::STOCK [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow >> CdcStreamChangeCollector::UpsertIntoTwoStreams |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] |53.2%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> AsyncIndexChangeCollector::DeleteNothing >> TResourceBrokerInstant::TestErrors [GOOD] >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertSingleRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreSSDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:22:54.846356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:54.846378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.846383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:54.846390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:54.846396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:54.846400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:54.846408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.846419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:54.847015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:54.847438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:54.870129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:22:54.870148Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:54.873105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:54.873128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:54.873165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:54.874190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:54.874323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:54.874395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.874449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:54.875121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.875158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:54.875337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:54.875345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.875376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:54.875382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:54.875388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:54.875407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.876512Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:22:54.919064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:54.919122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.919175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:54.919181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:54.920278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:54.920292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:54.921559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.921601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:54.921648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.921661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:54.921666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:54.921671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:54.922187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.922195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:54.922200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:54.922932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.922942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.922947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-10-24T16:22:54.922952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:54.925828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:54.927608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:54.928451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:54.928872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.928894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:54.928899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.928951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:54.928957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.928982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:54.928992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:54.929756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:54.929763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:90: TCreateFileStore::TConfigureParts operationId# 110:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.145487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2025-10-24T16:22:56.145520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2025-10-24T16:22:56.146597Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2025-10-24T16:22:56.146629Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 Status: OK 2025-10-24T16:22:56.146636Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:44: TCreateFileStore::TConfigureParts operationId# 110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2025-10-24T16:22:56.146644Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 110:0 3 -> 128 2025-10-24T16:22:56.147222Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.147296Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.147305Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:189: TCreateFileStore::TPropose operationId# 110:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.147312Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2025-10-24T16:22:56.147341Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:56.147800Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2025-10-24T16:22:56.147826Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2025-10-24T16:22:56.147915Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.147938Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 17179871344 
} } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.147945Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:155: TCreateFileStore::TPropose operationId# 110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2025-10-24T16:22:56.147965Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 110:0 128 -> 240 2025-10-24T16:22:56.147994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:22:56.148007Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2025-10-24T16:22:56.148381Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:56.148390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:56.148435Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:22:56.148451Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.148456Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2212], at schemeshard: 72057594046678944, txId: 110, path id: 1 2025-10-24T16:22:56.148462Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2212], at schemeshard: 72057594046678944, txId: 110, path id: 5 2025-10-24T16:22:56.148551Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.148558Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 110:0 ProgressState 2025-10-24T16:22:56.148573Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 1/1 2025-10-24T16:22:56.148577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:22:56.148583Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 1/1 2025-10-24T16:22:56.148586Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:22:56.148590Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2025-10-24T16:22:56.148596Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:22:56.148601Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all 
the parts is done, operation id: 110:0 2025-10-24T16:22:56.148605Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 110:0 2025-10-24T16:22:56.148626Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:22:56.148633Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2025-10-24T16:22:56.148637Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-10-24T16:22:56.148641Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-10-24T16:22:56.148734Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:56.148745Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:56.148750Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2025-10-24T16:22:56.148755Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-10-24T16:22:56.148759Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:22:56.148859Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:56.148870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:22:56.148875Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2025-10-24T16:22:56.148879Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-10-24T16:22:56.148885Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:22:56.148894Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 110, subscribers: 0 2025-10-24T16:22:56.149434Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-10-24T16:22:56.149454Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2025-10-24T16:22:56.149522Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2025-10-24T16:22:56.149529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2025-10-24T16:22:56.149601Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2025-10-24T16:22:56.149619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-10-24T16:22:56.149624Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [4:675:2623] TestWaitNotification: OK eventTxId 110 >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] |53.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |53.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |53.3%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalTableTest::ParallelCreateExternalTable [GOOD] |53.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |53.3%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |53.3%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |53.3%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExternalTableTest::DropExternalTable >> CdcStreamChangeCollector::InsertSingleRow >> TExternalTableTest::SchemeErrors |53.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:56.387280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:56.387317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.387324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:56.387332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:56.387340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:56.387346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:56.387355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.387371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:56.387495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.387559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:56.411384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:56.411425Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:56.411537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.415227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:56.415267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:56.415320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:56.416789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:56.416987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:56.417099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.417179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:56.418073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.418122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:56.418374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:56.418388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.418412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:56.418421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:56.418427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:56.418484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.419810Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:56.449096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:56.449183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.449247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:56.449255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:56.449335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-10-24T16:22:56.449351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:56.450411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.450473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:56.450541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.450554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:56.450560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:56.450566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:56.451073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.451092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.451098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:56.451440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.451450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.451455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.451462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:56.452161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:56.452646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:56.452702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 
1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:56.452911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.452939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.452949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.453007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:56.453014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.453047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:56.453059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:56.453572Z node 1 :FLAT_TX_SCHEMESHARD ... 
IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:22:56.464265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:22:56.464268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:22:56.464272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-10-24T16:22:56.464277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:22:56.464282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:22:56.464286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:22:56.464297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:22:56.464303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-10-24T16:22:56.464307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-10-24T16:22:56.464311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-10-24T16:22:56.464497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:22:56.464511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:22:56.464516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:22:56.464520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-10-24T16:22:56.464525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:22:56.464647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:22:56.464659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:22:56.464663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication 
in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:22:56.464667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:22:56.464671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:22:56.464682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:22:56.466024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:22:56.466474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:22:56.466531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:22:56.466538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:22:56.466607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:22:56.466625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:22:56.466631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:336:2325] TestWaitNotification: OK eventTxId 102 2025-10-24T16:22:56.466701Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.466737Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 47us result status StatusSuccess 2025-10-24T16:22:56.466830Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-10-24T16:22:56.467735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:56.467790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-10-24T16:22:56.467806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:304: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-10-24T16:22:56.467835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:132, at schemeshard: 72057594046678944 2025-10-24T16:22:56.468442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:132" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-10-24T16:22:56.468492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: 
/MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:132, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:22:56.468554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:22:56.468560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:22:56.468628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:22:56.468642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:22:56.468647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:344:2333] TestWaitNotification: OK eventTxId 103 >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:110:2141] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:110:2141] Leader for TabletID 9437185 is [0:0:0] sender: [1:115:2057] recipient: [1:112:2142] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:115:2057] recipient: [1:112:2142] Leader for TabletID 9437184 is [1:122:2149] sender: [1:123:2057] recipient: [1:110:2141] Leader for TabletID 9437185 is [1:125:2151] sender: [1:127:2057] recipient: [1:112:2142] Leader for TabletID 9437184 is [1:122:2149] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:125:2151] sender: [1:164:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:125:2151] sender: [1:166:2057] recipient: [1:107:2140] Leader for TabletID 9437185 is [1:125:2151] sender: [1:169:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:125:2151] sender: [1:171:2057] recipient: [1:170:2180] Leader for TabletID 9437185 is [1:172:2181] sender: [1:173:2057] recipient: [1:170:2180] Leader for TabletID 9437185 is [1:172:2181] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:122:2149] sender: [1:204:2057] recipient: [1:106:2139] Leader for TabletID 9437184 is [1:122:2149] sender: [1:207:2057] recipient: [1:206:2204] Leader for TabletID 9437184 is [1:208:2205] sender: [1:209:2057] recipient: [1:206:2204] Leader for TabletID 9437184 is [1:208:2205] sender: [1:238:2057] recipient: [1:14:2061] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2025-10-24T16:22:56.205408Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:22:56.205509Z node 1 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=0 resources={100, 100} 2025-10-24T16:22:56.205518Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:56.205526Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:22:56.205532Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:56.205543Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 200.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:22:56.205559Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-1 (1 by [1:105:2138]) (priority=0 type=compaction0 resources={80, 70} resubmit=0) 2025-10-24T16:22:56.205564Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:56.205570Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 160.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:22:56.205580Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {80, 70}) 2025-10-24T16:22:56.205587Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 160.000000 to 0.000000 (remove task task-1 (1 by [1:105:2138])) 2025-10-24T16:22:56.462694Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:22:56.462799Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:104:2138]) priority=0 resources={100, 100} 2025-10-24T16:22:56.462809Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:22:56.462817Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-1 (1 by [2:104:2138]) from queue queue_compaction0 2025-10-24T16:22:56.462823Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:22:56.462833Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 200.000000 (insert task task-1 (1 by [2:104:2138])) 2025-10-24T16:22:56.462849Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:104:2138]) priority=0 resources={100500, 100500} 2025-10-24T16:22:56.462854Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:408: SubmitTask failed for task 1 to [2:104:2138]: task with the same ID has been already submitted 2025-10-24T16:22:56.462868Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:515: FinishTask failed for task 2 to [2:104:2138]: cannot finish unknown task 2025-10-24T16:22:56.462874Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant failed for task 2: cannot finish unknown task ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode 
[GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:56.289368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:56.289399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.289404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:56.289413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:56.289420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:56.289425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:56.289434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.289456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:56.289582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.289643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:56.313822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:56.313871Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:56.313995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.324397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:56.324441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:56.324523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:56.329762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:56.329995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:56.330098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.330170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:56.331073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.331118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:56.331328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:56.331335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.331349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:56.331363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:56.331368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:56.331403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.332585Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:56.348842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:56.348937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.349008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:56.349015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:56.349069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:56.349081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:56.350267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.350344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:56.350425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.350438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:56.350445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:56.350451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:56.351022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.351035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.351041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:56.351489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.351506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.351514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.351523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:56.352234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:56.352823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:56.352905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:56.353166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.353201Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.353213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.353292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:56.353300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.353341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:56.353354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:56.353869Z node 1 :FLAT_TX_SCHEMESHARD ... essState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.515239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-10-24T16:22:56.515274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:56.515464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.515478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.515483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-10-24T16:22:56.515489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-10-24T16:22:56.515496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-10-24T16:22:56.516060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.516084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.516091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-10-24T16:22:56.516097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-10-24T16:22:56.516103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:22:56.516205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-10-24T16:22:56.517246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-10-24T16:22:56.517294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2025-10-24T16:22:56.517595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-10-24T16:22:56.517712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-10-24T16:22:56.517759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.517783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.517793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-10-24T16:22:56.517830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 129:0 128 -> 240 2025-10-24T16:22:56.517883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:22:56.517894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 FAKE_COORDINATOR: Erasing txId 129 2025-10-24T16:22:56.518347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:56.518357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:56.518410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:22:56.518426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.518434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:493:2450], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-10-24T16:22:56.518440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:493:2450], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-10-24T16:22:56.518499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.518508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2025-10-24T16:22:56.518523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#129:0 progress is 1/1 2025-10-24T16:22:56.518528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-10-24T16:22:56.518534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#129:0 progress is 1/1 2025-10-24T16:22:56.518538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-10-24T16:22:56.518542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-10-24T16:22:56.518548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-10-24T16:22:56.518553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 129:0 2025-10-24T16:22:56.518558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 129:0 2025-10-24T16:22:56.518572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:22:56.518579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-10-24T16:22:56.518584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-10-24T16:22:56.518587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-10-24T16:22:56.518737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.518747Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.518752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-10-24T16:22:56.518757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-10-24T16:22:56.518762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-10-24T16:22:56.518875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.518888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-10-24T16:22:56.518892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-10-24T16:22:56.518896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-10-24T16:22:56.518900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-10-24T16:22:56.518909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-10-24T16:22:56.519763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-10-24T16:22:56.520161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 20771, MsgBus: 25666 2025-10-24T16:22:36.391232Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824493796326939:2253];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:36.391267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d4b/r3tmp/tmpZSybPp/pdisk_1.dat 2025-10-24T16:22:36.443360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:36.463442Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824493796326722:2081] 1761322956387528 != 1761322956387531 2025-10-24T16:22:36.465170Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20771, node 1 2025-10-24T16:22:36.474920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:36.474932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:36.474933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:36.474974Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25666 2025-10-24T16:22:36.494609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:36.494633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:36.497003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:36.539195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:22:36.542412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:22:36.680364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:36.865100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824493796327397:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.865130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.865310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824493796327407:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.865317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:36.921355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:36.945458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.049798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:37.182328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824498091298556:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.182370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.182550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824498091298561:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.182560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824498091298562:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.182643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:37.183647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:37.187945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:22:37.188070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824498091298565:2624], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:22:37.281146Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824498091298616:4842] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:37.391136Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:41.392598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824493796326939:2253];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:41.392628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.338229s took: 0.330118s took: 0.337908s took: 0.334531s took: 0.347852s took: 0.352789s took: 0.355401s took: 0.357108s took: 0.344202s took: 0.349649s 2025-10-24T16:22:49.593392Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976716148; 2025-10-24T16:22:49.596586Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:911: SelfId: [1:7564824549630917137:5127], Table: `/Root/stock` ([72057594046644480:2:1]), SessionActorId: [1:7564824545335948382:5127]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7564824549630917137:5127].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-10-24T16:22:49.596806Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [1:7564824549630916237:5127], SessionActorId: [1:7564824545335948382:5127], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7564824545335948382:5127]. 2025-10-24T16:22:49.596870Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=1&id=Yzg3YTMwYjQtZWQwYzhkN2ItMjUwZWU0Y2EtMzQxMTE2Y2Q=, ActorId: [1:7564824545335948382:5127], ActorState: ExecuteState, TraceId: 01k8bgf9stcad1q23arrnvpeea, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7564824549630917077:5127] from: [1:7564824549630916237:5127] 2025-10-24T16:22:49.596893Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [1:756482454 ... abletStatus from node 1, TabletId: 72075186224037891 not found 2025-10-24T16:22:55.875488Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-10-24T16:22:55.879253Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-10-24T16:22:55.879268Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-10-24T16:22:55.879270Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-10-24T16:22:55.879273Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-10-24T16:22:55.879276Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-10-24T16:22:55.879278Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-10-24T16:22:55.879281Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-10-24T16:22:55.879283Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-10-24T16:22:55.879286Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-10-24T16:22:55.879288Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-10-24T16:22:55.879291Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-10-24T16:22:55.879293Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-10-24T16:22:55.879296Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-10-24T16:22:55.879298Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-10-24T16:22:55.879302Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-10-24T16:22:55.880812Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-10-24T16:22:55.880827Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-10-24T16:22:55.891075Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-10-24T16:22:55.891135Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-10-24T16:22:55.891141Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-10-24T16:22:55.891142Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-10-24T16:22:55.891169Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-10-24T16:22:55.891172Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-10-24T16:22:55.891174Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-10-24T16:22:55.891177Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-10-24T16:22:55.891240Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-10-24T16:22:55.891245Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-10-24T16:22:55.891316Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-10-24T16:22:55.907317Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2025-10-24T16:22:55.907336Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2025-10-24T16:22:55.908023Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-10-24T16:22:55.908033Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-10-24T16:22:55.908448Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-10-24T16:22:55.912350Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2025-10-24T16:22:55.912363Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-10-24T16:22:55.912365Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 
2025-10-24T16:22:55.912531Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-10-24T16:22:55.912540Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-10-24T16:22:55.912542Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2025-10-24T16:22:55.912544Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-10-24T16:22:55.912547Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2025-10-24T16:22:55.912550Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2025-10-24T16:22:55.914806Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2025-10-24T16:22:55.914818Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2025-10-24T16:22:55.914819Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2025-10-24T16:22:55.914821Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2025-10-24T16:22:55.914822Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-10-24T16:22:55.914823Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-10-24T16:22:55.915806Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2025-10-24T16:22:55.915822Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2025-10-24T16:22:55.915825Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2025-10-24T16:22:55.915827Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2025-10-24T16:22:55.916166Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2025-10-24T16:22:55.916178Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2025-10-24T16:22:55.916181Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-10-24T16:22:55.916183Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-10-24T16:22:55.916185Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2025-10-24T16:22:55.916186Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2025-10-24T16:22:55.916189Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-10-24T16:22:55.916191Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2025-10-24T16:22:55.916193Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-10-24T16:22:55.916214Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2025-10-24T16:22:55.916216Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2025-10-24T16:22:55.916224Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2025-10-24T16:22:55.916226Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2025-10-24T16:22:55.916228Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2025-10-24T16:22:55.916230Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2025-10-24T16:22:55.916232Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found |53.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> TExternalTableTest::ReplaceExternalTableIfNotExists |53.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:56.497532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:56.497558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.497562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-10-24T16:22:56.497569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:56.497576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:56.497579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:56.497586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.497599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:56.497710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.497762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:56.518454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:56.518494Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:56.518615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.523483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:56.523528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:56.523589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:56.525264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:56.525455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:56.525593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.525685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:56.526821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.526881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:56.527175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:56.527190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.527215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-10-24T16:22:56.527224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:56.527231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:56.527286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.528837Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:56.551318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:56.551430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.551518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:56.551526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:56.551583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:56.551598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:56.552615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.552704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:56.552792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.552806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:56.552813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:56.552819Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:56.553376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.553393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.553400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:56.554477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.554491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.554499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.554507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:56.555241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:56.555736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:56.555781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:56.556031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.556054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.556061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.556114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:56.556119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.556150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:56.556164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:56.556546Z node 1 :FLAT_TX_SCHEMESHARD ... pth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922405Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922420Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 16us result status StatusSuccess 2025-10-24T16:22:56.922468Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" 
PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922560Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922570Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 12us result status StatusSuccess 2025-10-24T16:22:56.922623Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922679Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922689Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 11us result status StatusSuccess 2025-10-24T16:22:56.922732Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922771Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.922781Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 11us result status StatusSuccess 2025-10-24T16:22:56.922834Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:56.262083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:56.262109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.262114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:56.262121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:56.262127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:56.262131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:56.262141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:56.262159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:56.262264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.262339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:56.283934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:56.283971Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:56.284065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:56.288344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:56.288384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:56.288419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:56.289805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:56.289979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:56.290093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.290166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:56.291045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.291093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:56.291328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:56.291342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:56.291367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:56.291375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:56.291381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:56.291424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-10-24T16:22:56.292721Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:56.322922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:56.323010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.323078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:56.323086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:56.323158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:56.323173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:56.324140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.324192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:56.324257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.324267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:56.324273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:56.324278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:56.324770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.324786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:56.324792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:56.325166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.325177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:56.325183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.325190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:56.325858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:56.326282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:56.326339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:56.326545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:56.326571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:56.326578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.326635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:56.326642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:56.326673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:56.326685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:56.327062Z node 1 :FLAT_TX_SCHEMESHARD ... 
efCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:22:56.582819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:22:56.582830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:22:56.582833Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:22:56.582838Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-10-24T16:22:56.582842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:22:56.582850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:22:56.583099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:22:56.583373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:22:56.583388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:22:56.583428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:22:56.583435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:22:56.583498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:22:56.583512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:22:56.583516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:340:2329] TestWaitNotification: OK eventTxId 102 2025-10-24T16:22:56.583583Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.583610Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 36us result status StatusSuccess 2025-10-24T16:22:56.583717Z 
node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-10-24T16:22:56.584492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:56.584547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-10-24T16:22:56.584559Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-10-24T16:22:56.584581Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, 
request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133, at schemeshard: 72057594046678944 2025-10-24T16:22:56.585003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-10-24T16:22:56.585042Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:22:56.585096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:22:56.585101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:22:56.585159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:22:56.585172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:22:56.585176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:348:2337] TestWaitNotification: OK eventTxId 103 2025-10-24T16:22:56.585232Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:56.585252Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 25us result status StatusSuccess 2025-10-24T16:22:56.585316Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |53.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |53.4%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |53.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> KqpConstraints::AlterTableAddColumnWithDefaultCancellation [GOOD] >> TExternalTableTest::SchemeErrors [GOOD] >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TxUsage::WriteToTopic_Demo_44_Query [GOOD] >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TExternalTableTest::ParallelCreateSameExternalTable >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::Decimal [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:57.413028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:57.413052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.413059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:57.413067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:57.413074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:57.413078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:57.413089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.413103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:57.413213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.413269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:57.482890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:57.482920Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:57.483000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.504203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:57.504252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:57.505277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:57.519384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:57.519499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:57.520105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.520218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:57.532306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:57.532387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:57.532771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:57.532788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:57.532801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:57.532812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:57.532818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:57.532851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.536399Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:22:57.614357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.614437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.614503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:57.614510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:57.614553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:57.614567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:57.616248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.616300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:57.616350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.616358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:57.616362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:57.616366Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:57.621390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.621414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:57.621422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:57.625146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.625163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.625170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.625190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:57.625919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:57.628088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:57.628876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:57.629152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.629191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.629200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.629266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:57.629276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.629311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:57.629324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:57.635297Z node 1 :FLAT_TX_SCHEMESHARD ... eration.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-10-24T16:22:57.660027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.660071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-10-24T16:22:57.660626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.660657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-10-24T16:22:57.660665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-10-24T16:22:57.660678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2025-10-24T16:22:57.661203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.661231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-10-24T16:22:57.661700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: 
"/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.661729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-10-24T16:22:57.661737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-10-24T16:22:57.661750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-10-24T16:22:57.662036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.662058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-10-24T16:22:57.662520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.663064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-10-24T16:22:57.663072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-10-24T16:22:57.663084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-10-24T16:22:57.663437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 
72057594046678944 2025-10-24T16:22:57.663462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-10-24T16:22:57.663941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.663971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-10-24T16:22:57.663979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-10-24T16:22:57.664019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-10-24T16:22:57.664433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.664459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-10-24T16:22:57.664915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.664944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2025-10-24T16:22:57.664951Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-10-24T16:22:57.664967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:167, at schemeshard: 72057594046678944 2025-10-24T16:22:57.666885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:167" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.666913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:167, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TResourceBroker::TestRealUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:57.962142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:57.962165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.962171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:57.962179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:57.962186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:57.962190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-10-24T16:22:57.962200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.962215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:57.962348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.962409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:58.007901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:58.007925Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.008018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:58.024223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:58.024257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:58.024305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:58.044845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:58.045284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:58.045854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.045947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:58.053637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.053692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:58.053906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:58.053912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.054111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:58.054117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:58.054121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:58.054160Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.057187Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:58.128556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:58.128648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.128718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:58.128726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:58.128789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:58.128803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.130018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.130075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:58.130141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.130151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:58.130157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:58.130162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:58.130686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.130696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:58.130701Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:58.133409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.133421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.133427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.133434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:58.134033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:58.136065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:58.136117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:58.136326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.136351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:58.136359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.136409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:58.136415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.136446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:58.136456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:58.136783Z node 1 :FLAT_TX_SCHEMESHARD ... 
ediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:58.216170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-10-24T16:22:58.216206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-10-24T16:22:58.216240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:22:58.216250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:22:58.216503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:22:58.216988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-10-24T16:22:58.217104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:58.217111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:58.217153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:22:58.217166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:22:58.217179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.217184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-10-24T16:22:58.217189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-10-24T16:22:58.217193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-10-24T16:22:58.217258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.217266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-10-24T16:22:58.217282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:22:58.217286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:22:58.217292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:22:58.217295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:22:58.217299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-10-24T16:22:58.217304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:22:58.217309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:22:58.217313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:22:58.217325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:22:58.217329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:22:58.217336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-10-24T16:22:58.217340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-10-24T16:22:58.217344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-10-24T16:22:58.217477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:22:58.217490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:22:58.217496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:22:58.217501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-10-24T16:22:58.217505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:22:58.217722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:22:58.217735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:22:58.217739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:22:58.217744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-10-24T16:22:58.217748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:22:58.217759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-10-24T16:22:58.218140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:22:58.218291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-10-24T16:22:58.218362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:22:58.218369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:22:58.218452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:22:58.218468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.218473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:401:2390] TestWaitNotification: OK eventTxId 104 2025-10-24T16:22:58.218556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.218583Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 39us result status StatusSuccess 2025-10-24T16:22:58.218669Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTabletPipeTest::TestOpen >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] >> TResourceBrokerInstant::TestMerge >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TTabletResolver::NodeProblem >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:57.549477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:57.549507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.549513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:57.549521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:57.549529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:57.549533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:57.549542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-10-24T16:22:57.549560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:57.549671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.549733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:57.678925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:57.678968Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:57.679075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.693124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:57.693171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:57.694018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:57.728528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:57.729319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:57.729911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.730040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:57.733203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:57.733264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:57.733559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:57.733569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:57.733591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:57.733600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:57.733607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:57.733657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.744352Z node 1 :HIVE 
INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:57.928978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.929074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.929153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:57.929166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:57.931640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:57.931671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:57.948621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.948696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:57.948774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.948788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:57.948794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:57.948800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:57.956368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.956397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:57.956405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:57.960189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.960216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.960225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.960236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:57.960976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:57.968115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:57.968195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:57.968428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.968473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.968480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.968553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:57.968560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.968599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:57.968611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:57.975121Z node 1 :FLAT_TX_SCHEMESHARD ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:22:58.000708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:22:58.000712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:22:58.000715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-10-24T16:22:58.001274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.001284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.001288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:22:58.001292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:22:58.001296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:22:58.005335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.005363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.005369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:22:58.005375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-10-24T16:22:58.005381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:22:58.005398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:22:58.008824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:22:58.008877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:22:58.008923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:22:58.008930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:22:58.008994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:22:58.009010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.009015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2300] TestWaitNotification: OK eventTxId 101 2025-10-24T16:22:58.009086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.009126Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 49us result status StatusSuccess 2025-10-24T16:22:58.009226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } 
Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-10-24T16:22:58.010103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:58.010153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-10-24T16:22:58.010164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-10-24T16:22:58.010170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-10-24T16:22:58.017796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:58.017855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:22:58.017931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:22:58.017938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:22:58.018012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:22:58.018032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.018038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:319:2308] TestWaitNotification: OK eventTxId 102 2025-10-24T16:22:58.018106Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: 
Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.018131Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 30us result status StatusPathDoesNotExist 2025-10-24T16:22:58.018165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpConstraints::AlterTableAddColumnWithDefaultCancellation [GOOD] Test command err: Trying to start YDB, gRPC: 8227, MsgBus: 23875 2025-10-24T16:22:08.944698Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824376898284739:2083];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:22:08.946906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0008b8/r3tmp/tmpwAzSXd/pdisk_1.dat 2025-10-24T16:22:09.053547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:09.060498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:09.060530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:09.060913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:09.083073Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8227, node 1 2025-10-24T16:22:09.355991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:09.408235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:09.408245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2025-10-24T16:22:09.408247Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:09.408285Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23875 TClient is connected to server localhost:23875 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:09.874271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:09.881685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:09.895132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:09.950199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:10.064816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:22:10.233128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:10.300353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:22:10.864791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824385488220937:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.864836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.864976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824385488220947:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:10.864981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.148885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.171054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.186137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.207195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.227528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.252217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.266208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.301733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:11.381211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824389783189116:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.381233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.381333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824389783189121:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.381338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824389783189122:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:11.381380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, ... >: Error: Execution, code: 1060
:2:91: Error: At tuple, At tuple, At function: KqlDeleteRows, At function: Map, At function: Filter
:2:29: Error: At lambda
:2:89: Error: At function: ==
:2:82: Error: At function: Member, At function: Member, At function: Member
:2:82: Error: Member not found: Value3. Did you mean Value? 2025-10-24T16:22:24.562169Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=ZWU1MTU2MDgtOGJjY2ZkMi02MmY2MzRkZS01Yjc3NDRmZg==, ActorId: [7:700:2577], ActorState: ExecuteState, TraceId: 01k8bgej1c4psahyq9wv70hp7y, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:22:24.603673Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710760:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) GRpc shutdown warning: left infly: 1, spent: 3.454448 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 7.181035 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 10.866338 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 14.446265 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 17.893047 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 21.387912 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 24.751794 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 28.355984 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: failed to shutdown all connections, left infly: 1, spent: 30.002154 sec. GRpcShutdownDeadline: 30 Trying to start YDB, gRPC: 17385, MsgBus: 25790 2025-10-24T16:22:55.263030Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:22:55.263079Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0008b8/r3tmp/tmpA8ISk9/pdisk_1.dat 2025-10-24T16:22:55.378868Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:55.378894Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:55.381582Z node 8 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:55.382396Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [8:7564824575904929334:2081] 1761322975239268 != 1761322975239271 2025-10-24T16:22:55.400358Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17385, node 8 2025-10-24T16:22:55.425513Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:22:55.425523Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:22:55.425525Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:22:55.425576Z node 8 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:22:55.444775Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25790 TClient is connected to server localhost:25790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:22:55.537141Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:22:55.544394Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:22:55.559944Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:22:56.238686Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:22:56.906053Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:56.906497Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:56.907835Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:57.058660Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824584494865718:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:57.058698Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:57.061977Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824584494865732:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:57.061993Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7564824584494865733:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:57.062015Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:22:57.063038Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:22:57.072444Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-10-24T16:22:57.072617Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7564824584494865738:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-10-24T16:22:57.136636Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=8&id=MWNlOGQ5YjAtNGRiMDVjZmEtODc1MGFmYjctNDJkMzZlMzQ=, ActorId: [8:7564824584494865710:2466], ActorState: ExecuteState, TraceId: 01k8bgfhm8drae7fvf3mqnk2gm, Create QueryResponse for error on request, msg: 2025-10-24T16:22:57.137958Z node 8 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [8:7564824584494865842:3168] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:22:57.495595Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:7564824584494866035:2567], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At tuple, At function: SqlProjectItem, At lambda
:2:24: Error: At function: Member
:2:24: Error: Member not found: DefaultValue 2025-10-24T16:22:57.496084Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=8&id=NDhhZWI3YTktMmQxYzM0ZWYtNWRjMmNjYTEtNDA4YjY4MzM=, ActorId: [8:7564824584494866028:2563], ActorState: ExecuteState, TraceId: 01k8bgfj6g0sx6f5046gx7qeek, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |53.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> TTabletPipeTest::TestOpen [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:57.388919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:57.388939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.388943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:57.388949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:57.388954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:57.388957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:57.388965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:57.388975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:57.389057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.389099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:57.441594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:57.441629Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:57.441725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:57.460882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:57.460928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:57.460981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:57.462400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:57.462539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:57.462655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.462739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:57.463593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:57.463644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:57.463917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:57.463928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:57.463950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:57.463959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:57.463966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:57.464016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.465405Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:57.539352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:57.539431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.539504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-10-24T16:22:57.539512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:57.539577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:57.539590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:57.554223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.554293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:57.554384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.554398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:57.554403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:57.554410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:57.554900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.554909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:57.554915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:57.555613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.555622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:57.555628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.555635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:57.566661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-10-24T16:22:57.579136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:57.580056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:57.580374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:57.580426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:57.580440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.580524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:57.580535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:57.580581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:57.580598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:57.581759Z node 1 :FLAT_TX_SCHEMESHARD ... 
schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:22:58.486056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:22:58.486097Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.486103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-10-24T16:22:58.486109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-10-24T16:22:58.486113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-10-24T16:22:58.486118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:22:58.486181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.486189Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:22:58.486205Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:22:58.486210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:22:58.486217Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:22:58.486242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:22:58.486253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:22:58.486260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:22:58.486265Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:22:58.486270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:22:58.486287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:22:58.486292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:22:58.486299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 3, subscribers: 0 
2025-10-24T16:22:58.486304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:22:58.486307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-10-24T16:22:58.486328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-10-24T16:22:58.486600Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.486617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.486622Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:22:58.486628Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:22:58.486633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:22:58.486849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.486861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.486866Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:22:58.486870Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:22:58.486875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:22:58.487008Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.487018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:22:58.487021Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:22:58.487025Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-10-24T16:22:58.487028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:22:58.487037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:22:58.487276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:22:58.487567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:22:58.487585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:22:58.487633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:22:58.487640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:22:58.487722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:22:58.487739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.487744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:340:2329] TestWaitNotification: OK eventTxId 101 2025-10-24T16:22:58.487820Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.487888Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 79us result status StatusSuccess 2025-10-24T16:22:58.488004Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTabletPipeTest::TestSendAfterReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:107:2139] ... blocking block result NO_GROUP for [1:108:2139] ... blocking block result NO_GROUP for [1:109:2139] ... blocking block result NO_GROUP for [1:110:2139] >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletCountersAggregator::ColumnShardCounters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::WriteToTopic_Demo_44_Query [GOOD] Test command err: 2025-10-24T16:20:40.914522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823996167122313:2164];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:40.914546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:40.923036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:20:40.924908Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001365/r3tmp/tmp9ieGQT/pdisk_1.dat 2025-10-24T16:20:40.967258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:40.967286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:40.968532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:40.969031Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3121, node 
1 2025-10-24T16:20:41.023513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001365/r3tmp/yandexMSgkhA.tmp 2025-10-24T16:20:41.023527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001365/r3tmp/yandexMSgkhA.tmp 2025-10-24T16:20:41.023625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001365/r3tmp/yandexMSgkhA.tmp 2025-10-24T16:20:41.023704Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:41.038708Z INFO: TTestServer started on Port 27905 GrpcPort 3121 2025-10-24T16:20:41.043653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27905 PQClient connected to localhost:3121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:41.108290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 2025-10-24T16:20:41.118994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:20:41.467056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824000462090304:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.467075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824000462090312:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.467088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.467151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824000462090319:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.467162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.467821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:41.468187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824000462090349:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.468204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.468240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824000462090352:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.468250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.469986Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824000462090318:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:20:41.516318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.523070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.536068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-10-24T16:20:41.568066Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824000462090630:2595] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7564824000462090670:2623] 2025-10-24T16:20:41.916864Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:45.914581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564823996167122313:2164];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:45.914621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:46.784472Z :WriteToTopic_Demo_44_Table INFO: TTopicSdkTestSetup started 2025-10-24T16:20:46.787257Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:46.791393Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824021936927398:2729] connected; active server actors: 1 2025-10-24T16:20:46.791428Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:46.791529Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:46.791551Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-10-24T16:20:46.791891Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:20:46.792014Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 720 ... node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.568103Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.568105Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.568107Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.667093Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.667093Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.667101Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.667103Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.667114Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.667117Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.768101Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.768116Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.768118Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.768132Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.768135Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.768136Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.867695Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.867711Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.867713Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: 
[72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.867730Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.867733Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.867734Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.971970Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.971985Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.971988Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:57.972006Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:57.972008Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:57.972010Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:58.075888Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:58.075904Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:58.075907Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:58.075933Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:58.075936Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:58.075938Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:58.078984Z :INFO: [/Root] [/Root] [b99d21cf-b1ee1f94-adaa3b-e673cce2] Closing read session. Close timeout: 0.000000s 2025-10-24T16:22:58.079003Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:99:100 2025-10-24T16:22:58.079011Z :INFO: [/Root] [/Root] [b99d21cf-b1ee1f94-adaa3b-e673cce2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 62047 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:22:58.079029Z :NOTICE: [/Root] [/Root] [b99d21cf-b1ee1f94-adaa3b-e673cce2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-10-24T16:22:58.079039Z :DEBUG: [/Root] [/Root] [b99d21cf-b1ee1f94-adaa3b-e673cce2] [] Abort session to cluster 2025-10-24T16:22:58.079250Z :NOTICE: [/Root] [/Root] [b99d21cf-b1ee1f94-adaa3b-e673cce2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:22:58.079605Z node 2 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_2_1_2855813012533390561_v1 grpc read done: success# 0, data# { } 2025-10-24T16:22:58.079617Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_2_1_2855813012533390561_v1 grpc read failed 2025-10-24T16:22:58.079623Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_2_1_2855813012533390561_v1 grpc closed 2025-10-24T16:22:58.079640Z node 2 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_2_1_2855813012533390561_v1 is DEAD 2025-10-24T16:22:58.080186Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037895][topic_A] pipe [2:7564824324973209010:2517] disconnected. 2025-10-24T16:22:58.080195Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037895][topic_A] pipe [2:7564824324973209010:2517] disconnected; active server actors: 1 2025-10-24T16:22:58.080200Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037895][topic_A] pipe [2:7564824324973209010:2517] client test-consumer disconnected session test-consumer_2_1_2855813012533390561_v1 2025-10-24T16:22:58.080236Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [2:7564824582671248931:2520] destroyed 2025-10-24T16:22:58.085234Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-10-24T16:22:58.085248Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0] PartitionId [0] Generation [1] Write session will now close 2025-10-24T16:22:58.085257Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0] PartitionId [0] Generation [1] Write session: aborting 2025-10-24T16:22:58.085475Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-10-24T16:22:58.085481Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0] PartitionId [0] Generation [1] Write session: destroy 2025-10-24T16:22:58.089044Z node 2 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0 grpc read done: success: 0 data: 2025-10-24T16:22:58.089059Z node 2 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0 grpc read failed 2025-10-24T16:22:58.089069Z node 2 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0 grpc closed 2025-10-24T16:22:58.089073Z node 2 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: test-message_group_id|84bc86cd-b689056c-9f180967-e2925d92_0 is DEAD 2025-10-24T16:22:58.089397Z node 2 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:22:58.089407Z node 2 :PQ_WRITE_PROXY DEBUG: 
writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:22:58.089795Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [2:7564824320678241425:2494] destroyed 2025-10-24T16:22:58.089804Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [2:7564824320678241428:2494] destroyed 2025-10-24T16:22:58.089818Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 2025-10-24T16:22:58.089827Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:58.089830Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:58.089834Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:22:58.176638Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:58.176654Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:58.176656Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:22:58.176680Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:22:58.176682Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:22:58.176683Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist >> TResourceBroker::TestRandomQueue [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletCountersAggregator::ColumnShardCounters [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:58.784405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:58.784434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:58.784440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:58.784449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:58.784457Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:58.784463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:58.784474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:58.784490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:58.784627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:58.784696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:58.848527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:58.848558Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.848655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:58.852347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:58.852377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:58.852430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:58.856653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:58.857450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:58.857572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.857644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:58.859888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.859947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:58.860215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:58.860224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.860245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:58.860253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain 
is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:58.860259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:58.860294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.861651Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:58.917025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:58.917122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.917199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:58.917207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:58.918106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:58.918127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.922263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.922353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:58.922441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.922459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:58.922466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:58.922472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:58.924557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-10-24T16:22:58.924580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:58.924588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:58.925334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.925348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.925356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.925365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:58.927396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:58.928824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:58.929477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:58.929731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.929766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:58.929775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.929843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:58.929851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.929888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:58.929901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:58.933531Z node 1 :FLAT_TX_SCHEMESHARD ... 7594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:58.958668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.958683Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 17us result status StatusSuccess 2025-10-24T16:22:58.958733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: 
"NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-10-24T16:22:58.958776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-10-24T16:22:58.958783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-10-24T16:22:58.958799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-10-24T16:22:58.958803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-10-24T16:22:58.958812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-10-24T16:22:58.958815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-10-24T16:22:58.958899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-10-24T16:22:58.958916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.958921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:349:2338] 2025-10-24T16:22:58.958945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-10-24T16:22:58.958955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.958959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:349:2338] 2025-10-24T16:22:58.958967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-10-24T16:22:58.958981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.958985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:349:2338] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-10-24T16:22:58.959051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.959067Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 19us result status 
StatusSuccess 2025-10-24T16:22:58.959121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-10-24T16:22:58.960204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:58.960251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-10-24T16:22:58.960264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-10-24T16:22:58.960283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose 
status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133, at schemeshard: 72057594046678944 2025-10-24T16:22:58.960895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-10-24T16:22:58.960934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133, operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TTabletResolver::NodeProblem [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:22:58.674152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:58.674180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:58.674186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:58.674195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:58.674202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:58.674206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:58.674215Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:58.674230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:58.675017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:58.675790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:58.733579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:58.733622Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.733731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:58.746751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:58.746789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:58.746834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:58.747974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:58.748103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:58.748204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.748277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:58.752680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.752741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:58.753002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:58.753011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:58.753030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:58.753039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:58.753045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:58.753095Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.760693Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:22:58.790224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:58.790330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.790405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:58.790413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:58.790481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:58.790497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.791317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.791374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:58.791444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.791455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:58.791462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:58.791467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:58.791873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.791885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:58.791891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for 
txid 1:0 3 -> 128 2025-10-24T16:22:58.792204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.792215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:58.792221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.792228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:58.792832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:58.793196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:58.793251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:58.793474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:58.793499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:22:58.793507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.793564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:22:58.793571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:58.793605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:22:58.793617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:22:58.793991Z node 1 :FLAT_TX_SCHEMESHARD ... 
SHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.863789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.863820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.863831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.863834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.863839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.863879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.863917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.863933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.863945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.863965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.863972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-10-24T16:22:58.863995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.863998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, 
unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-10-24T16:22:58.864014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2025-10-24T16:22:58.864053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-10-24T16:22:58.864076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2025-10-24T16:22:58.864116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2025-10-24T16:22:58.864138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:433:2422] 2025-10-24T16:22:58.864229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-10-24T16:22:58.864232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:433:2422] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 132 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2025-10-24T16:22:58.864644Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:22:58.864695Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 66us result status StatusSuccess 2025-10-24T16:22:58.864777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> TTabletPipeTest::TestShutdown >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] Test command err: 2025-10-24T16:22:59.652262Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:22:59.652995Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=0 resources={100, 200} 2025-10-24T16:22:59.653004Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.653011Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 200} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:22:59.653016Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.653027Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:22:59.653042Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=0 resources={100, 100} 2025-10-24T16:22:59.653045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.653049Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-2 (2 by [1:105:2138]) from queue 
queue_compaction0 2025-10-24T16:22:59.653060Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.653065Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 600.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:22:59.653074Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-1 (1 by [1:105:2138]) (priority=0 type=compaction0 resources={200, 300} resubmit=0) 2025-10-24T16:22:59.653078Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.653081Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 200.000000 to 800.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:22:59.653086Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:105:2138]) (release resources {100, 100}) 2025-10-24T16:22:59.653091Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 600.000000 (remove task task-2 (2 by [1:105:2138])) 2025-10-24T16:22:59.653100Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:105:2138]) priority=0 resources={10, 20} 2025-10-24T16:22:59.653103Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.653107Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {10, 20} for task task-3 (3 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:22:59.653110Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.653114Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 40.000000 (insert task task-3 (3 by [1:105:2138])) >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> TFileStoreWithReboots::Create [GOOD] >> TabletState::ExplicitUnsubscribe >> TResourceBroker::TestCounters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:20:40.171202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:40.171229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:40.171235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:40.171243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:40.171251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:40.171255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:40.171270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:40.171283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:40.171383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:40.171446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:40.186325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:20:40.186355Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:40.190464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:40.190509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:40.190558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:40.193409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:40.195651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:40.195800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:40.195972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:40.200574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:40.200637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:40.200913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:40.200926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:40.200950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:40.200959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:40.200966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:40.200995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:20:40.202548Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:20:40.227439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:40.227532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:40.227592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:40.227600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:40.227677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:40.227696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:40.228509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:40.228555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:40.228618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:40.228630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:40.228636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:40.228641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:40.229089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:40.229102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:40.229108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:40.229471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:40.229481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:40.229487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:40.229495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:40.230195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:40.230609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:40.230656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:40.230872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:40.230901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:20:40.230909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:40.230974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:20:40.230982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:40.231012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:20:40.231026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:20:40.231462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:40.231472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... d: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 28 Memory: 124168 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-10-24T16:22:58.559240Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:22:58.559255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0028 2025-10-24T16:22:58.559274Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:22:58.559281Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-10-24T16:22:58.609212Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.609243Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.609253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-10-24T16:22:58.609281Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:609: Will execute TTxStoreStats, queue# 1 2025-10-24T16:22:58.609289Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-10-24T16:22:58.609326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-10-24T16:22:58.609347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main 
stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-10-24T16:22:58.609353Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-10-24T16:22:58.609390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:469: Do not want to split tablet 72075186233409546 by size, its table already has 1 out of 1 partitions 2025-10-24T16:22:58.609416Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:58.620018Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.620046Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.620052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:22:58.655987Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:719:2683]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-10-24T16:22:58.656100Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3474: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-10-24T16:22:58.656198Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269553162, Sender [3:719:2683], Recipient [3:130:2154]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 10 Memory: 124168 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 211 TableOwnerId: 72057594046678944 FollowerId: 0 2025-10-24T16:22:58.656207Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-10-24T16:22:58.656222Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:571: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.001 2025-10-24T16:22:58.656241Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:578: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-10-24T16:22:58.656249Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-10-24T16:22:58.707991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2025-10-24T16:22:58.708032Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-10-24T16:22:58.708044Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-10-24T16:22:58.708067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2025-10-24T16:22:58.708072Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-10-24T16:22:58.708104Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.708113Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.708117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-10-24T16:22:58.708138Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:609: Will execute TTxStoreStats, queue# 1 2025-10-24T16:22:58.708143Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:618: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-10-24T16:22:58.708172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:263: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-10-24T16:22:58.708188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:284: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2025-10-24T16:22:58.708193Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2025-10-24T16:22:58.708225Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:464: Want to split tablet 72075186233409547 by size: split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2025-10-24T16:22:58.708230Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:511: Postpone split tablet 72075186233409547 
because it has borrow parts, enqueue compact them first 2025-10-24T16:22:58.708234Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-10-24T16:22:58.708256Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:22:58.720168Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:130:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.720190Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5426: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-10-24T16:22:58.720197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:598: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-10-24T16:22:58.960354Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:130:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:58.960376Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-10-24T16:22:58.960389Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [3:130:2154], Recipient [3:130:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-10-24T16:22:58.960394Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-10-24T16:22:59.829306Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829339Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [1:219:2140] followers: 0 2025-10-24T16:22:59.829350Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:219:2140] 2025-10-24T16:22:59.829382Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829401Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [1:225:2144] followers: 0 2025-10-24T16:22:59.829406Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:225:2144] 2025-10-24T16:22:59.829539Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:219:2140] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829544Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 
followers: 0 countLeader 1 allowFollowers 0 winner: [1:219:2140] 2025-10-24T16:22:59.829561Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:225:2144] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829565Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:225:2144] 2025-10-24T16:22:59.829584Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 4 2025-10-24T16:22:59.829592Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [1:219:2140] by nodeId 2025-10-24T16:22:59.829596Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:219:2140] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829601Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:22:59.829622Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [2:235:2097] followers: 0 2025-10-24T16:22:59.829627Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:235:2097] 2025-10-24T16:22:59.829667Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [1:225:2144] by nodeId 2025-10-24T16:22:59.829671Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:225:2144] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829675Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:22:59.829695Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [2:241:2099] followers: 0 2025-10-24T16:22:59.829699Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:241:2099] 2025-10-24T16:22:59.829892Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-10-24T16:22:59.829899Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:235:2097] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829903Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:235:2097] 2025-10-24T16:22:59.829927Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:241:2099] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829931Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: 
SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:241:2099] 2025-10-24T16:22:59.829953Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 6 2025-10-24T16:22:59.829958Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [2:235:2097] by nodeId 2025-10-24T16:22:59.829962Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:235:2097] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.829965Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:22:59.829986Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [3:253:2097] followers: 0 2025-10-24T16:22:59.829990Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:253:2097] 2025-10-24T16:22:59.830036Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:241:2099] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.830040Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:241:2099] 2025-10-24T16:22:59.830062Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2025-10-24T16:22:59.830068Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:253:2097] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.830071Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:253:2097] 2025-10-24T16:22:59.830094Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [2:241:2099] by nodeId 2025-10-24T16:22:59.830098Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:241:2099] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-10-24T16:22:59.830102Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:22:59.830125Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [3:259:2099] followers: 0 2025-10-24T16:22:59.830129Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:259:2099] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-10-24T16:22:59.004765Z node 1 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:22:59.004852Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:22:59.004859Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.004865Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:22:59.004869Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.004881Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:22:59.004889Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:22:59.004893Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.004898Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:22:59.004904Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:22:59.004907Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.004911Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:22:59.004914Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.004918Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:22:59.004921Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.004924Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:22:59.004927Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.004932Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:22:59.004935Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.004938Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:22:59.004941Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.004945Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:22:59.004948Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: 
Assigning waiting task task-6 (6 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.004951Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:22:59.004954Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.004964Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {400, 400}) 2025-10-24T16:22:59.004971Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 800.000000 2025-10-24T16:22:59.004975Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:22:59.004978Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.004982Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:22:59.004986Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [1:105:2138]) 2025-10-24T16:22:59.004988Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.004994Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:105:2138]) (release resources {400, 400}) 2025-10-24T16:22:59.004998Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 800.000000 to 280.000000 (remove task task-2 (2 by [1:105:2138])) 2025-10-24T16:22:59.005002Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 280.000000 2025-10-24T16:22:59.005005Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-4 (4 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:22:59.005008Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.005012Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 280.000000 to 1054.000000 (insert task task-4 (4 by [1:105:2138])) 2025-10-24T16:22:59.005015Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:22:59.005018Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-10-24T16:22:59.005023Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:105:2138]) (release resources {400, 400}) 2025-10-24T16:22:59.005026Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1054.000000 to 560.000000 (remove task task-4 (4 by [1:105:2138])) 2025-10-24T16:22:59.005030Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 280.000000 to 560.000000 2025-10-24T16:22:59.005034Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} 
for task task-6 (6 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:22:59.005037Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.005040Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 560.000000 to 1308.000000 (insert task task-6 (6 by [1:105:2138])) 2025-10-24T16:22:59.005043Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:22:59.373809Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:22:59.374479Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [2:105:2138]) priority=1 resources={290, 440} 2025-10-24T16:22:59.374491Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.374500Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {290, 440} for task task-1 (1 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.374505Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.374520Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 880.000000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:22:59.374532Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-2 (2 by [2:105:2138]) priority=1 resources={182, 350} 2025-10-24T16:22:59.374538Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-2 (2 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.374544Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:22:59.374551Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [2:105:2138]) priority=2 resources={247, 87} 2025-10-24T16:22:59.374556Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:22:59.374561Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [2:105:2138]) 2025-10-24T16:22:59.374567Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_default blocked by an earlier queue 2025-10-24T16:22:59.374574Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-4 (4 by [2:105:2138]) priority=0 resources={29, 265} 2025-10-24T16:22:59.374579Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-4 (4 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.374583Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [2:105:2138]) 2025-10-24T16:22:59.374588Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_default blocked by an earlier queue 2025-10-24T16:22:59.374596Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [2:105:2138]) priority=4 resources={81, 97} 2025-10-24T16:22:59.374600Z node 2 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.374605Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [2:105:2138]) 2025-10-24T16:22:59.374609Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.374613Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_default blocked by an earlier queue 2025-10-24T16:22:59.374620Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-6 (6 by [2:105:2138]) priority=2 resources={61, 367} 2025-10-24T16:22:59.374624Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:22:59.374629Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [2:105:2138]) 2025-10-24T16:22:59.374633Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:22:59.374638Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_default blocked by an earlier queue 2025-10-24T16:22: ... resource usage for queue queue_default from 694018.849200 to 695402.588400 2025-10-24T16:22:59.663786Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {117, 194} for task task-890 (890 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.663790Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-890 (890 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.663795Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 693278.580000 to 696529.883600 (insert task task-890 (890 by [2:105:2138])) 2025-10-24T16:22:59.663800Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {159, 196} for task task-894 (894 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.663804Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-894 (894 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.663809Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 696529.883600 to 697668.800400 (insert task task-894 (894 by [2:105:2138])) 2025-10-24T16:22:59.663813Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-895 (895 by [2:105:2138]) 2025-10-24T16:22:59.663820Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-890 (890 by [2:105:2138]) (release resources {117, 194}) 2025-10-24T16:22:59.663826Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 697668.800400 to 697166.883600 (remove task task-890 (890 by [2:105:2138])) 2025-10-24T16:22:59.663831Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 695402.588400 to 696659.948400 2025-10-24T16:22:59.663835Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-895 (895 by [2:105:2138]) 2025-10-24T16:22:59.663841Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-894 (894 by [2:105:2138]) (release resources {159, 196}) 2025-10-24T16:22:59.663857Z 
node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 696659.948400 to 698094.041200 2025-10-24T16:22:59.663861Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {236, 495} for task task-895 (895 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.663865Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-895 (895 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.663870Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 697166.883600 to 701086.613200 (insert task task-895 (895 by [2:105:2138])) 2025-10-24T16:22:59.663874Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-921 (921 by [2:105:2138]) 2025-10-24T16:22:59.663882Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-895 (895 by [2:105:2138]) (release resources {236, 495}) 2025-10-24T16:22:59.663888Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 701086.613200 to 699685.763200 (remove task task-895 (895 by [2:105:2138])) 2025-10-24T16:22:59.663892Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 698094.041200 to 699685.763200 2025-10-24T16:22:59.663897Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {361, 186} for task task-921 (921 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.663901Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-921 (921 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.663907Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 699685.763200 to 701877.033200 (insert task task-921 (921 by [2:105:2138])) 2025-10-24T16:22:59.663913Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-925 (925 by [2:105:2138]) 2025-10-24T16:22:59.663919Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-921 (921 by [2:105:2138]) (release resources {361, 186}) 2025-10-24T16:22:59.663924Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 701877.033200 to 700758.510800 (remove task task-921 (921 by [2:105:2138])) 2025-10-24T16:22:59.663929Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 699685.763200 to 700758.510800 2025-10-24T16:22:59.663933Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {182, 282} for task task-925 (925 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.663936Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-925 (925 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.663941Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 700758.510800 to 702421.972400 (insert task task-925 (925 by [2:105:2138])) 2025-10-24T16:22:59.663946Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {160, 73} for task task-948 (948 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.663949Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-948 (948 by 
[2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.663954Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 702421.972400 to 702852.584800 (insert task task-948 (948 by [2:105:2138])) 2025-10-24T16:22:59.663958Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-954 (954 by [2:105:2138]) 2025-10-24T16:22:59.663964Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-925 (925 by [2:105:2138]) (release resources {182, 282}) 2025-10-24T16:22:59.663970Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 702852.584800 to 701244.959200 (remove task task-925 (925 by [2:105:2138])) 2025-10-24T16:22:59.663975Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 700758.510800 to 700828.942800 2025-10-24T16:22:59.663979Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-954 (954 by [2:105:2138]) 2025-10-24T16:22:59.663985Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-948 (948 by [2:105:2138]) (release resources {160, 73}) 2025-10-24T16:22:59.663990Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 701244.959200 to 701032.718800 (remove task task-948 (948 by [2:105:2138])) 2025-10-24T16:22:59.663995Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 700828.942800 to 701032.718800 2025-10-24T16:22:59.663999Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {440, 331} for task task-954 (954 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.664004Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-954 (954 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.664009Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 701032.718800 to 703455.182800 (insert task task-954 (954 by [2:105:2138])) 2025-10-24T16:22:59.664015Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-979 (979 by [2:105:2138]) 2025-10-24T16:22:59.664021Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-954 (954 by [2:105:2138]) (release resources {440, 331}) 2025-10-24T16:22:59.664026Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 703455.182800 to 702782.862800 (remove task task-954 (954 by [2:105:2138])) 2025-10-24T16:22:59.664032Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 701032.718800 to 702782.862800 2025-10-24T16:22:59.664036Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {339, 124} for task task-979 (979 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.664040Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-979 (979 by [2:105:2138])' of unknown type 'wrong' to default queue 2025-10-24T16:22:59.664045Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 702782.862800 to 704696.992400 (insert task task-979 (979 by [2:105:2138])) 
2025-10-24T16:22:59.664049Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_default due to exceeded limits 2025-10-24T16:22:59.664055Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-979 (979 by [2:105:2138]) (release resources {339, 124}) 2025-10-24T16:22:59.664060Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 702782.862800 to 705315.464000 2025-10-24T16:22:59.664064Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {70, 87} for task task-986 (986 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.664068Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-986 (986 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.664073Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 704696.992400 to 705821.873600 (insert task task-986 (986 by [2:105:2138])) 2025-10-24T16:22:59.664078Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {41, 139} for task task-994 (994 by [2:105:2138]) from queue queue_default 2025-10-24T16:22:59.664082Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-994 (994 by [2:105:2138]) to queue queue_default 2025-10-24T16:22:59.664086Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 705821.873600 to 706630.964800 (insert task task-994 (994 by [2:105:2138])) 2025-10-24T16:22:59.664092Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-986 (986 by [2:105:2138]) (release resources {70, 87}) 2025-10-24T16:22:59.664097Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 706630.964800 to 706137.083200 (remove task task-986 (986 by [2:105:2138])) 2025-10-24T16:22:59.664102Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 705315.464000 to 705348.098400 2025-10-24T16:22:59.664109Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-994 (994 by [2:105:2138]) (release resources {41, 139}) 2025-10-24T16:22:59.664114Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 706137.083200 to 705881.914000 (remove task task-994 (994 by [2:105:2138])) 2025-10-24T16:22:59.664119Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 705348.098400 to 705881.914000 >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TTabletResolver::TabletResolvePriority [GOOD] |53.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... 
waiting for boot2 ... waiting for connect2 >> TTabletPipeTest::TestShutdown [GOOD] >> TTabletPipeTest::TestTwoNodes >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries >> TResourceBroker::TestResubmitTask >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TTabletLabeledCountersAggregator::SimpleAggregation >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TabletState::ExplicitUnsubscribe [GOOD] |53.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] |53.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TResourceBroker::TestChangeTaskType [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:42.133802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:42.133822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:42.133827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:42.133834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:42.133840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:42.133844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:42.133853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:42.133869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:42.133985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:42.134043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:42.158838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:42.158866Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:42.158955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:42.161532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:42.161611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:42.161656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:42.164082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:42.164197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:42.164292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.164354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:42.165098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:42.165141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:42.165347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:42.165356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:42.165378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:42.165385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:42.165391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:42.165411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:42.166614Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:42.188182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:42.188229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.188269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:42.188276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:42.188312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:42.188325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:42.188855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.188889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:42.188923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.188936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:42.188941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:42.188945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:42.189288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.189298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:42.189303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:42.189606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.189616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.189621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:42.189626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:42.190257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:42.190620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:42.190660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:42.190834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.190858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
1 msg type: 269090816 2025-10-24T16:23:00.789378Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1001, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1001 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1001 at step: 5000003 2025-10-24T16:23:00.789434Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:00.789452Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1001 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 214748366959 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:00.789458Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_fs.cpp:155: TCreateFileStore::TPropose operationId# 1001:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-10-24T16:23:00.789473Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1001:0 128 -> 240 2025-10-24T16:23:00.789497Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:00.789507Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1001 2025-10-24T16:23:00.794477Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:00.794485Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1001, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:00.794515Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1001, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:00.794534Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:00.794538Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:211:2212], at schemeshard: 72057594046678944, txId: 1001, path id: 2 2025-10-24T16:23:00.794544Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [50:211:2212], at schemeshard: 72057594046678944, txId: 1001, path id: 3 2025-10-24T16:23:00.794585Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1001:0, at schemeshard: 72057594046678944 2025-10-24T16:23:00.794592Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1001:0 ProgressState 2025-10-24T16:23:00.794606Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1001:0 progress is 1/1 
2025-10-24T16:23:00.794610Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2025-10-24T16:23:00.794614Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1001:0 progress is 1/1 2025-10-24T16:23:00.794617Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2025-10-24T16:23:00.794621Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1001, ready parts: 1/1, is published: false 2025-10-24T16:23:00.794625Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1001 ready parts: 1/1 2025-10-24T16:23:00.794629Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1001:0 2025-10-24T16:23:00.794633Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1001:0 2025-10-24T16:23:00.794654Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:23:00.794659Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1001, publications: 2, subscribers: 0 2025-10-24T16:23:00.794663Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1001, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-10-24T16:23:00.794666Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1001, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-10-24T16:23:00.794790Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1001 2025-10-24T16:23:00.794801Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 1001 2025-10-24T16:23:00.794805Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1001 2025-10-24T16:23:00.794809Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1001, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-10-24T16:23:00.794813Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:00.794938Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1001 2025-10-24T16:23:00.794949Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 
72057594046678944, cookie: 1001 2025-10-24T16:23:00.794953Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1001 2025-10-24T16:23:00.794957Z node 50 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1001, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:23:00.794960Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:00.794967Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1001, subscribers: 0 2025-10-24T16:23:00.804104Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1001 2025-10-24T16:23:00.804133Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1001 TestModificationResult got TxId: 1001, wait until txId: 1001 TestWaitNotification wait txId: 1001 2025-10-24T16:23:00.804181Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1001: send EvNotifyTxCompletion 2025-10-24T16:23:00.804187Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1001 2025-10-24T16:23:00.804249Z node 50 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1001, at schemeshard: 72057594046678944 2025-10-24T16:23:00.804271Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1001: got EvNotifyTxCompletionResult 2025-10-24T16:23:00.804276Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1001: satisfy waiter [50:382:2361] TestWaitNotification: OK eventTxId 1001 2025-10-24T16:23:00.804348Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:00.804389Z node 50 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_1" took 56us result status StatusSuccess 2025-10-24T16:23:00.804484Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_1" PathDescription { Self { Name: "FS_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } FileStoreDescription { Name: "FS_1" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 1 FileSystemId: "FS_1" FolderId: "folder" CloudId: "cloud" BlockSize: 4096 BlocksCount: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk >> TTabletPipeTest::TestTwoNodes [GOOD] |53.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |53.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::ExplicitUnsubscribe [GOOD] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> TResourceBroker::TestUpdateCookie [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:110:2141] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:114:2057] recipient: [1:110:2141] Leader for TabletID 9437185 is [0:0:0] sender: [1:115:2057] recipient: [1:112:2142] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:115:2057] recipient: [1:112:2142] Leader for TabletID 9437184 is [1:122:2149] sender: [1:123:2057] recipient: [1:110:2141] Leader for TabletID 9437185 is [1:125:2151] sender: [1:127:2057] recipient: [1:112:2142] Leader for TabletID 9437184 is [1:122:2149] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:125:2151] sender: [1:164:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:125:2151] sender: [1:166:2057] recipient: [1:107:2140] Leader for TabletID 9437185 is [1:125:2151] sender: [1:169:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:125:2151] sender: [1:171:2057] recipient: [1:170:2180] Leader for TabletID 9437185 is [1:172:2181] sender: [1:173:2057] recipient: [1:170:2180] Leader for TabletID 9437185 is [1:172:2181] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:122:2149] sender: [1:204:2057] recipient: [1:106:2139] Leader for TabletID 9437184 is [1:122:2149] sender: [1:207:2057] recipient: [1:206:2204] Leader for TabletID 9437184 is [1:208:2205] sender: [1:209:2057] 
recipient: [1:206:2204] Leader for TabletID 9437184 is [1:208:2205] sender: [1:238:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [0:0:0] sender: [2:110:2057] recipient: [2:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [2:110:2057] recipient: [2:108:2140] Leader for TabletID 9437184 is [2:114:2144] sender: [2:115:2057] recipient: [2:108:2140] Leader for TabletID 9437184 is [2:114:2144] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [2:163:2057] recipient: [2:161:2166] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:163:2057] recipient: [2:161:2166] Leader for TabletID 9437185 is [2:167:2170] sender: [2:168:2057] recipient: [2:161:2166] Leader for TabletID 9437185 is [2:167:2170] sender: [2:193:2057] recipient: [2:14:2061] >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages |53.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect |53.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] Test command err: 2025-10-24T16:23:02.534571Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:02.535403Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:02.535411Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:02.535418Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:02.535423Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:02.535443Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:02.535452Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [1:105:2138]) priority=5 resources={100, 100} 2025-10-24T16:23:02.535456Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535461Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-2 (2 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:02.535465Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535470Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 
(insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:02.535477Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:105:2138]) priority=5 resources={100, 100} 2025-10-24T16:23:02.535481Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535485Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-3 (3 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:02.535489Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535494Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 400.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:02.535500Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [1:105:2138]) priority=5 resources={100, 100} 2025-10-24T16:23:02.535503Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535507Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-4 (4 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:02.535510Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535514Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 600.000000 (insert task task-4 (4 by [1:105:2138])) 2025-10-24T16:23:02.535519Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [1:105:2138]) priority=5 resources={250, 250} 2025-10-24T16:23:02.535523Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:02.535527Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:105:2138]) 2025-10-24T16:23:02.535534Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [1:105:2138]) priority=5 resources={250, 250} 2025-10-24T16:23:02.535537Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535541Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:105:2138]) 2025-10-24T16:23:02.535544Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-10-24T16:23:02.535549Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-7 (7 by [1:105:2138]) priority=5 resources={150, 150} 2025-10-24T16:23:02.535552Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535556Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:105:2138]) 2025-10-24T16:23:02.535559Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 
2025-10-24T16:23:02.535590Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {200, 200}) 2025-10-24T16:23:02.535595Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 0.000000 (remove task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:02.535599Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-5 (5 by [1:105:2138]) 2025-10-24T16:23:02.535602Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction1 blocked by an earlier queue 2025-10-24T16:23:02.535608Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:105:2138]) (release resources {100, 100}) 2025-10-24T16:23:02.535612Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 600.000000 to 400.000000 (remove task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:02.535615Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-5 (5 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:02.535619Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:02.535623Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 475.000000 (insert task task-5 (5 by [1:105:2138])) 2025-10-24T16:23:02.535626Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-6 (6 by [1:105:2138]) 2025-10-24T16:23:02.535646Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:105:2138]) (release resources {100, 100}) 2025-10-24T16:23:02.535650Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 400.000000 to 200.000000 (remove task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:02.535653Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-6 (6 by [1:105:2138]) 2025-10-24T16:23:02.535658Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:105:2138]) (release resources {100, 100}) 2025-10-24T16:23:02.535661Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 0.000000 (remove task task-4 (4 by [1:105:2138])) 2025-10-24T16:23:02.535665Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-6 (6 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:02.535670Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535674Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 425.000000 (insert task task-6 (6 by [1:105:2138])) 2025-10-24T16:23:02.535677Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-7 (7 by [1:105:2138]) 2025-10-24T16:23:02.535696Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-5 (5 by [1:105:2138]) (release resources {250, 250}) 2025-10-24T16:23:02.535700Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned 
resource usage for queue queue_compaction0 from 475.000000 to 0.000000 (remove task task-5 (5 by [1:105:2138])) 2025-10-24T16:23:02.535703Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {150, 150} for task task-7 (7 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:02.535707Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-7 (7 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:02.535711Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 425.000000 to 680.000000 (insert task task-7 (7 by [1:105:2138])) 2025-10-24T16:23:02.535715Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-6 (6 by [1:105:2138]) (release resources {250, 250}) 2025-10-24T16:23:02.535719Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 680.000000 to 255.000000 (remove task task-6 (6 by [1:105:2138])) 2025-10-24T16:23:02.535740Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-7 (7 by [1:105:2138]) (release resources {150, 150}) 2025-10-24T16:23:02.535745Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 255.000000 to 0.000000 (remove task task-7 (7 by [1:105:2138])) 2025-10-24T16:23:02.535752Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1000 (1000 by [1:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:02.535756Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1000 (1000 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:02.535760Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1000 (1000 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:02.535764Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1000 (1000 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:02.535768Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 900.000000 (insert task task-1000 (1000 by [1:105:2138])) 2025-10-24T16:23:02.535776Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [1:105:2138]) priority=5 resources={1, 1} 2025-10-24T16:23:02.535780Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.535784Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-1 (1 by [1:105:2138]) 2025-10-24T16:23:02.535790Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-2 (2 by [1:105:2138]) priority=5 resources={1, 1} 2025-10-24T16:23:02.535793Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task- ... 
task-9 (9 by [1:105:2138]) priority=5 resources={1, 1} 2025-10-24T16:23:02.536166Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-9 (9 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536171Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-1 (1 by [1:105:2138]) 2025-10-24T16:23:02.536178Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-10 (10 by [1:105:2138]) priority=5 resources={1, 1} 2025-10-24T16:23:02.536182Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-10 (10 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536185Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-1 (1 by [1:105:2138]) 2025-10-24T16:23:02.536191Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1000 (1000 by [1:105:2138]) (release resources {500, 500}) 2025-10-24T16:23:02.536196Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1500.000000 2025-10-24T16:23:02.536202Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-1 (1 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536206Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536209Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 2.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:02.536213Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-2 (2 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536216Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536220Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 2.000000 to 4.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:02.536224Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-3 (3 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536227Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536230Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 4.000000 to 6.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:02.536234Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-4 (4 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536237Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536241Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 6.000000 to 8.000000 (insert task task-4 (4 by [1:105:2138])) 2025-10-24T16:23:02.536244Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-5 (5 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536248Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: 
Assigning in-fly task task-5 (5 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536252Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 8.000000 to 10.000000 (insert task task-5 (5 by [1:105:2138])) 2025-10-24T16:23:02.536255Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-6 (6 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536259Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536262Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 10.000000 to 12.000000 (insert task task-6 (6 by [1:105:2138])) 2025-10-24T16:23:02.536266Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-7 (7 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536269Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-7 (7 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536273Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 12.000000 to 14.000000 (insert task task-7 (7 by [1:105:2138])) 2025-10-24T16:23:02.536277Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-8 (8 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536280Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-8 (8 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536285Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 14.000000 to 16.000000 (insert task task-8 (8 by [1:105:2138])) 2025-10-24T16:23:02.536288Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-9 (9 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536292Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-9 (9 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536296Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 16.000000 to 18.000000 (insert task task-9 (9 by [1:105:2138])) 2025-10-24T16:23:02.536299Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {1, 1} for task task-10 (10 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:02.536302Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-10 (10 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:02.536306Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 18.000000 to 20.000000 (insert task task-10 (10 by [1:105:2138])) 2025-10-24T16:23:02.536318Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536322Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 0.000000 to 20.000000 2025-10-24T16:23:02.536326Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536331Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:105:2138]) (release 
resources {1, 1}) 2025-10-24T16:23:02.536335Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536340Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-5 (5 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536344Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-6 (6 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536348Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-7 (7 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536353Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-8 (8 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536357Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-9 (9 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:02.536361Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-10 (10 by [1:105:2138]) (release resources {1, 1}) 2025-10-24T16:23:03.155472Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:03.155550Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:104:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:03.155557Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:23:03.155563Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [2:104:2138]) from queue queue_compaction0 2025-10-24T16:23:03.155567Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:23:03.155576Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:104:2138])) 2025-10-24T16:23:03.155584Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [2:104:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:03.155587Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:23:03.155591Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:104:2138]) 2025-10-24T16:23:03.155597Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:104:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:03.155601Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:23:03.155605Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:104:2138]) 2025-10-24T16:23:03.155617Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-3 (3 by [2:104:2138]) (priority=5 type=compaction1 resources={400, 400} resubmit=0) 2025-10-24T16:23:03.155621Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:104:2138]) to queue queue_compaction1 2025-10-24T16:23:03.155625Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [2:104:2138]) 2025-10-24T16:23:03.155628Z 
node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:03.155634Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:104:2138]) (release resources {400, 400}) 2025-10-24T16:23:03.155640Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 80.000000 (remove task task-1 (1 by [2:104:2138])) 2025-10-24T16:23:03.155644Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 2025-10-24T16:23:03.155648Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-3 (3 by [2:104:2138]) from queue queue_compaction1 2025-10-24T16:23:03.155651Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:104:2138]) to queue queue_compaction1 2025-10-24T16:23:03.155655Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 800.000000 (insert task task-3 (3 by [2:104:2138])) 2025-10-24T16:23:03.155658Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:104:2138]) |53.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] Test command err: 2025-10-24T16:23:03.268712Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:03.268803Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:03.268811Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268819Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.268824Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268837Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 400.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:03.268846Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:03.268850Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268855Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.268859Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268864Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 800.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:03.268871Z node 1 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:03.268875Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268880Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:23:03.268893Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [1:105:2138]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2025-10-24T16:23:03.268898Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268902Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:03.268909Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {200, 200}) 2025-10-24T16:23:03.268918Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 400.000000 to 40.000000 (remove task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:03.268922Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 40.000000 2025-10-24T16:23:03.268927Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.268932Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268937Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 804.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:03.268943Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:23:03.268949Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [1:105:2138]) (priority=5 type=compaction0 resources={200, 200} resubmit=1) 2025-10-24T16:23:03.268954Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268958Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.268962Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268966Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 40.000000 to 422.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:03.268971Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.268975Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.268979Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 422.000000 to 
804.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:03.792359Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:03.792460Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:03.792468Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792476Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.792481Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792494Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:03.792510Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [2:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:03.792514Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792520Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:03.792527Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:472: Update cookie for task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:03.792533Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {400, 400}) 2025-10-24T16:23:03.792539Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 800.000000 to 0.000000 (remove task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:03.792544Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.792548Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792553Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 380.000000 (insert task task-2 (2 by [2:105:2138])) 2025-10-24T16:23:03.792562Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:03.792566Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792570Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.792574Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792579Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 380.000000 to 760.000000 (insert task task-3 (3 by [2:105:2138])) 2025-10-24T16:23:03.792585Z node 2 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:472: Update cookie for task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:03.792590Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [2:105:2138]) (release resources {200, 200}) 2025-10-24T16:23:03.792595Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 760.000000 to 380.000000 (remove task task-3 (3 by [2:105:2138])) 2025-10-24T16:23:03.792603Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-2 (2 by [2:105:2138]) (priority=5 type=compaction0 resources={400, 400} resubmit=1) 2025-10-24T16:23:03.792609Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792613Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-2 (2 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:03.792617Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:03.792621Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 720.000000 (insert task task-2 (2 by [2:105:2138])) >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> ReadSessionImplTest::ForcefulDestroyPartitionStream ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-10-24T16:22:57.700899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:57.932678Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:57.932760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:57.932774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000956/r3tmp/tmpxa7D2O/pdisk_1.dat 2025-10-24T16:22:58.226085Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.228965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:58.228998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:58.229111Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322976669477 != 1761322976669480 2025-10-24T16:22:58.263950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:58.328832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.391256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:58.481807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:58.497803Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:685:2574] 2025-10-24T16:22:58.497870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:58.520263Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2576] 2025-10-24T16:22:58.520315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:58.521531Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:58.521556Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:58.521695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:58.521703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:58.521709Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:58.521757Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:58.521777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:58.521788Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:717:2574] in generation 1 2025-10-24T16:22:58.521844Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:58.521858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:58.521938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:22:58.521943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:22:58.521948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:22:58.521968Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:58.521979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:58.521986Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2576] in generation 1 2025-10-24T16:22:58.536076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:58.541233Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:58.541307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:58.541339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-10-24T16:22:58.541344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:58.541349Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:58.541354Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.541387Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:58.541395Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:22:58.541405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:58.541413Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-10-24T16:22:58.541416Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:22:58.541420Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:22:58.541423Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-10-24T16:22:58.541563Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:58.541585Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:58.541591Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:22:58.541598Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:22:58.541617Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:58.541624Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.541632Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:58.541637Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:58.541643Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:22:58.541646Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.541650Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:22:58.541653Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:22:58.541743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:679:2570], serverId# [1:690:2577], sessionId# [0:0:0] 2025-10-24T16:22:58.541751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:680:2571], serverId# [1:692:2578], sessionId# [0:0:0] 2025-10-24T16:22:58.541773Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:58.541822Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:58.541838Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:58.541911Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-10-24T16:22:58.541931Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:22:58.541939Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-10-24T16:22:58.542255Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:58.542268Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:22:58.556077Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:58.556113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:58.556137Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-10-24T16:22:58.556145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:58.704347Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2608], serverId# [1:743:2611], sessionId# [0:0:0] 2025-10-24T16:22:58.704385Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server conne ... vPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-10-24T16:23:04.107883Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:23:04.108176Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:23:04.108184Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:04.108193Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:23:04.108199Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:23:04.108203Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-10-24T16:23:04.108216Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:04.108223Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:04.108234Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:04.108274Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:23:04.108278Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:04.108453Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:04.108459Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:04.108463Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:04.108470Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:04.108474Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 
state Ready TxInFly 0 2025-10-24T16:23:04.108481Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:04.108691Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:04.108705Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:04.115620Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:04.115704Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:04.115741Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-10-24T16:23:04.115750Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-10-24T16:23:04.115888Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:04.115896Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:04.117303Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:04.117319Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:797:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:04.117327Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:04.117438Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:803:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:04.117453Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:04.118070Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:04.119033Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:04.119052Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:04.175711Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:04.288333Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:04.288379Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:04.288957Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:04.321839Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:875:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:04.355069Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgfrnn50hvrgbzp87zsjza, Database: , SessionId: ydb://session/3?node_id=4&id=MTdhOTY5NC1mNGJiMzRiLWFkM2RlNWMtZjljNWJmMDE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:04.355540Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:944:2729], serverId# [4:945:2730], sessionId# [0:0:0] 2025-10-24T16:23:04.355643Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-10-24T16:23:04.355710Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322984355682 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:04.355732Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-10-24T16:23:04.366852Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-10-24T16:23:04.366875Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:04.378206Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfrxf1d157e0r0r2f23wc, Database: , SessionId: ydb://session/3?node_id=4&id=ZmZlYTFkYjEtYzI4YWIzYy1mZmY1NWZjOS1lZDVjNWFiNQ==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:23:04.378717Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037889 2025-10-24T16:23:04.378796Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1761322984378769 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:04.378821Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1761322984378769 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:04.378835Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-10-24T16:23:04.392293Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-10-24T16:23:04.392320Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:04.393187Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:985:2761], serverId# [4:986:2762], sessionId# [0:0:0] 2025-10-24T16:23:04.394037Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:987:2763], serverId# [4:988:2764], sessionId# [0:0:0] >> TPipeCacheTest::TestAutoConnect [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] Test command err: ... waiting for connect1 ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... 
waiting for connect11 >> ReadSessionImplTest::SuccessfulInit [GOOD] >> BuildStatsHistogram::Many_Serial [GOOD] >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> TFileStoreWithReboots::CreateWithIntermediateDirs [GOOD] |53.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> Compression::WriteRAW >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-10-24T16:23:06.207136Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.207144Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.207149Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.233114Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.234314Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.252794Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.252999Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.260477Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.260487Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.260491Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.261275Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.261403Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.261456Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.261969Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.262087Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-10-24T16:23:06.262422Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.262427Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.262432Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.272450Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.277498Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.277590Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.281038Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.281259Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.281373Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.281397Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.281406Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-10-24T16:23:06.281808Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.281814Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.281818Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.291107Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.291286Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.291905Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.292997Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-10-24T16:23:06.293370Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:06.293409Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-10-24T16:23:06.293469Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-10-24T16:23:06.293495Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-10-24T16:23:06.294556Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.294566Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:06.294577Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:06.294622Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-10-24T16:23:06.294632Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:06.294636Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-10-24T16:23:06.294640Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:06.294663Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-10-24T16:23:06.294686Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-10-24T16:23:06.294690Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-10-24T16:23:06.294694Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:06.294706Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-10-24T16:23:06.294712Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-10-24T16:23:06.294716Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-10-24T16:23:06.294720Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:06.294736Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-10-24T16:23:06.300119Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.300126Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.300130Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.300185Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.310639Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.310725Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.311929Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-10-24T16:23:06.312168Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:06.312203Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-10-24T16:23:06.312268Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-10-24T16:23:06.312283Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-10-24T16:23:06.314635Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.314649Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:06.314654Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:06.314659Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-10-24T16:23:06.314670Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:06.314733Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-10-24T16:23:06.314762Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-10-24T16:23:06.314766Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-10-24T16:23:06.314770Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-10-24T16:23:06.314775Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-10-24T16:23:06.314780Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:06.314803Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-10-24T16:23:06.318067Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.318073Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.318077Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.318151Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.318282Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.318332Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.318386Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. 
Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.318612Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:06.318684Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:06.318731Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-10-24T16:23:06.318749Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-10-24T16:23:06.318774Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.318781Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:06.318785Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-10-24T16:23:06.318789Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-10-24T16:23:06.318796Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-10-24T16:23:06.318800Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-10-24T16:23:06.318830Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-10-24T16:23:06.318854Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit |53.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] Test command err: LabeledCountersByGroup { Group: "group1/group2" LabeledCounter { Value: 39 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "/" } CounterNames: "value1" 2025-10-24T16:23:03.084847Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [2:8:2055] 2025-10-24T16:23:03.084914Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:9:2056] worker 0 2025-10-24T16:23:03.084921Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:10:2057] worker 1 2025-10-24T16:23:03.084926Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:11:2058] worker 2 2025-10-24T16:23:03.084949Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:12:2059] worker 3 2025-10-24T16:23:03.084954Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:13:2060] worker 4 2025-10-24T16:23:03.084960Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:14:2061] worker 5 2025-10-24T16:23:03.084965Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:15:2062] worker 6 2025-10-24T16:23:03.084970Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:16:2063] 
worker 7 2025-10-24T16:23:03.084975Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:17:2064] worker 8 2025-10-24T16:23:03.084980Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:18:2065] worker 9 Sending message to [2:10:2057] from [2:8:2055] id 1 Sending message to [2:11:2058] from [2:8:2055] id 2 Sending message to [2:12:2059] from [2:8:2055] id 3 Sending message to [2:13:2060] from [2:8:2055] id 4 Sending message to [2:14:2061] from [2:8:2055] id 5 Sending message to [2:15:2062] from [2:8:2055] id 6 Sending message to [2:16:2063] from [2:8:2055] id 7 Sending message to [2:17:2064] from [2:8:2055] id 8 Sending message to [2:18:2065] from [2:8:2055] id 9 Sending message to [2:9:2056] from [2:8:2055] id 10 2025-10-24T16:23:03.758226Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [2:11:2058] 2025-10-24T16:23:03.758380Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [2:12:2059] 2025-10-24T16:23:03.758395Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [2:13:2060] 2025-10-24T16:23:03.758406Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [2:14:2061] 2025-10-24T16:23:03.758415Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [2:15:2062] 2025-10-24T16:23:03.758432Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [2:16:2063] 2025-10-24T16:23:03.758441Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [2:17:2064] 2025-10-24T16:23:03.758450Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [2:18:2065] 2025-10-24T16:23:03.758513Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [2:9:2056] 2025-10-24T16:23:03.758521Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [2:10:2057] 2025-10-24T16:23:03.758531Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [2:9:2056] 2025-10-24T16:23:03.758913Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [2:9:2056] 2025-10-24T16:23:03.769639Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:9:2056] Initiator [2:8:2055] 2025-10-24T16:23:03.778537Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:10:2057] 2025-10-24T16:23:03.778962Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:10:2057] 2025-10-24T16:23:03.788368Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:10:2057] Initiator [2:8:2055] 2025-10-24T16:23:03.796632Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:11:2058] 2025-10-24T16:23:03.797032Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:11:2058] 2025-10-24T16:23:03.802760Z node 2 
:TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:11:2058] Initiator [2:8:2055] 2025-10-24T16:23:03.811346Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:12:2059] 2025-10-24T16:23:03.811714Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:12:2059] 2025-10-24T16:23:03.816718Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:12:2059] Initiator [2:8:2055] 2025-10-24T16:23:03.824513Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:13:2060] 2025-10-24T16:23:03.824840Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:13:2060] 2025-10-24T16:23:03.829845Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:13:2060] Initiator [2:8:2055] 2025-10-24T16:23:03.844975Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:14:2061] 2025-10-24T16:23:03.845377Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:14:2061] 2025-10-24T16:23:03.850316Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:14:2061] Initiator [2:8:2055] 2025-10-24T16:23:03.868340Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:15:2062] 2025-10-24T16:23:03.868730Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:15:2062] 2025-10-24T16:23:03.879308Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:15:2062] Initiator [2:8:2055] 2025-10-24T16:23:03.887476Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:16:2063] 2025-10-24T16:23:03.887844Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:16:2063] 2025-10-24T16:23:03.892808Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:16:2063] Initiator [2:8:2055] 2025-10-24T16:23:03.900355Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:17:2064] 2025-10-24T16:23:03.900739Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:17:2064] 2025-10-24T16:23:03.905421Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:17:2064] Initiator [2:8:2055] 2025-10-24T16:23:03.915424Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:18:2065] 2025-10-24T16:23:03.915738Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:18:2065] 2025-10-24T16:23:03.924423Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:18:2065] Initiator [2:8:2055] 2025-10-24T16:23:03.932585Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [2:8:2055] 2025-10-24T16:23:03.932645Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator 
actor merged response node 0 [2:8:2055] 2025-10-24T16:23:03.933793Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:8:2055] 2025-10-24T16:23:03.933839Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:8:2055] 2025-10-24T16:23:03.935298Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:8:2055] 2025-10-24T16:23:03.935345Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:8:2055] 2025-10-24T16:23:03.938160Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:8:2055] 2025-10-24T16:23:03.938210Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:8:2055] 2025-10-24T16:23:03.940125Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:8:2055] 2025-10-24T16:23:03.940170Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:8:2055] 2025-10-24T16:23:03.941547Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:8:2055] 2025-10-24T16:23:03.941590Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:8:2055] 2025-10-24T16:23:03.942989Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:8:2055] 2025-10-24T16:23:03.943033Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:8:2055] 2025-10-24T16:23:03.945223Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:8:2055] 2025-10-24T16:23:03.945263Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:8:2055] 2025-10-24T16:23:03.946603Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:8:2055] 2025-10-24T16:23:03.946642Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:8:2055] 2025-10-24T16:23:03.947919Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:8:2055] 2025-10-24T16:23:03.947955Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:8:2055] 2025-10-24T16:23:03.949187Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:8:2055] Initiator [2:7:2054] TEST 2 10 duration 0.921927s 2025-10-24T16:23:04.131121Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [3:8:2055] 2025-10-24T16:23:04.131667Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:9:2056] worker 0 2025-10-24T16:23:04.131675Z node 3 :TABLET_AGGREGATOR INFO ... 
or got response node 9 [3:8:2055] 2025-10-24T16:23:04.534687Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [3:8:2055] 2025-10-24T16:23:04.539416Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [3:8:2055] 2025-10-24T16:23:04.539474Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [3:8:2055] 2025-10-24T16:23:04.543747Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [3:8:2055] 2025-10-24T16:23:04.543802Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [3:8:2055] 2025-10-24T16:23:04.551587Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [3:8:2055] Initiator [3:7:2054] TEST 2 20 duration 0.514502s 2025-10-24T16:23:04.771673Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [4:8:2055] 2025-10-24T16:23:04.771705Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [4:8:2055] self [4:9:2056] worker 0 Sending message to [4:9:2056] from [4:8:2055] id 1 Sending message to [4:9:2056] from [4:8:2055] id 2 Sending message to [4:9:2056] from [4:8:2055] id 3 Sending message to [4:9:2056] from [4:8:2055] id 4 Sending message to [4:9:2056] from [4:8:2055] id 5 Sending message to [4:9:2056] from [4:8:2055] id 6 Sending message to [4:9:2056] from [4:8:2055] id 7 Sending message to [4:9:2056] from [4:8:2055] id 8 Sending message to [4:9:2056] from [4:8:2055] id 9 Sending message to [4:9:2056] from [4:8:2055] id 10 2025-10-24T16:23:04.925918Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [4:9:2056] 2025-10-24T16:23:04.925935Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [4:9:2056] 2025-10-24T16:23:04.925940Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [4:9:2056] 2025-10-24T16:23:04.925944Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [4:9:2056] 2025-10-24T16:23:04.925963Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [4:9:2056] 2025-10-24T16:23:04.925970Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [4:9:2056] 2025-10-24T16:23:04.925976Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [4:9:2056] 2025-10-24T16:23:04.925982Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [4:9:2056] 2025-10-24T16:23:04.925988Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [4:9:2056] 2025-10-24T16:23:04.925993Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [4:9:2056] 2025-10-24T16:23:04.926053Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [4:9:2056] 2025-10-24T16:23:04.926468Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [4:9:2056] 2025-10-24T16:23:04.932973Z node 4 :TABLET_AGGREGATOR INFO: 
tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [4:9:2056] 2025-10-24T16:23:04.933363Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [4:9:2056] 2025-10-24T16:23:04.961823Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [4:9:2056] 2025-10-24T16:23:04.962269Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [4:9:2056] 2025-10-24T16:23:05.063590Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [4:9:2056] 2025-10-24T16:23:05.063977Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [4:9:2056] 2025-10-24T16:23:05.096511Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [4:9:2056] 2025-10-24T16:23:05.097393Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [4:9:2056] 2025-10-24T16:23:05.358522Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [4:9:2056] 2025-10-24T16:23:05.359364Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [4:9:2056] 2025-10-24T16:23:05.519762Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [4:9:2056] 2025-10-24T16:23:05.520572Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [4:9:2056] 2025-10-24T16:23:05.813686Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [4:9:2056] 2025-10-24T16:23:05.820213Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [4:9:2056] 2025-10-24T16:23:05.855035Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [4:9:2056] 2025-10-24T16:23:05.855794Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [4:9:2056] 2025-10-24T16:23:05.876361Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [4:9:2056] 2025-10-24T16:23:05.876610Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [4:9:2056] 2025-10-24T16:23:05.970937Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:9:2056] Initiator [4:8:2055] 2025-10-24T16:23:06.090170Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [4:8:2055] 2025-10-24T16:23:06.090572Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [4:8:2055] 2025-10-24T16:23:06.110395Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:8:2055] Initiator [4:7:2054] TEST 2 1 duration 1.377081s 2025-10-24T16:23:06.206914Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [5:7:2054] self [5:8:2055] worker 0 Sending message to [5:8:2055] from [5:8:2055] id 1 Sending message to [5:8:2055] from [5:8:2055] id 2 Sending message to [5:8:2055] from [5:8:2055] id 3 Sending message 
to [5:8:2055] from [5:8:2055] id 4 Sending message to [5:8:2055] from [5:8:2055] id 5 Sending message to [5:8:2055] from [5:8:2055] id 6 Sending message to [5:8:2055] from [5:8:2055] id 7 Sending message to [5:8:2055] from [5:8:2055] id 8 Sending message to [5:8:2055] from [5:8:2055] id 9 Sending message to [5:8:2055] from [5:8:2055] id 10 2025-10-24T16:23:06.307410Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [5:8:2055] 2025-10-24T16:23:06.307426Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [5:8:2055] 2025-10-24T16:23:06.307431Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [5:8:2055] 2025-10-24T16:23:06.307435Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [5:8:2055] 2025-10-24T16:23:06.307440Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [5:8:2055] 2025-10-24T16:23:06.307465Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [5:8:2055] 2025-10-24T16:23:06.307472Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [5:8:2055] 2025-10-24T16:23:06.307481Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [5:8:2055] 2025-10-24T16:23:06.307488Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [5:8:2055] 2025-10-24T16:23:06.307495Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [5:8:2055] 2025-10-24T16:23:06.307559Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [5:8:2055] 2025-10-24T16:23:06.308034Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [5:8:2055] 2025-10-24T16:23:06.313968Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [5:8:2055] 2025-10-24T16:23:06.314446Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [5:8:2055] 2025-10-24T16:23:06.321155Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [5:8:2055] 2025-10-24T16:23:06.321597Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [5:8:2055] 2025-10-24T16:23:06.332976Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [5:8:2055] 2025-10-24T16:23:06.333460Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [5:8:2055] 2025-10-24T16:23:06.341476Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [5:8:2055] 2025-10-24T16:23:06.341961Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [5:8:2055] 2025-10-24T16:23:06.363900Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [5:8:2055] 2025-10-24T16:23:06.364374Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [5:8:2055] 
2025-10-24T16:23:06.378587Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [5:8:2055] 2025-10-24T16:23:06.378998Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [5:8:2055] 2025-10-24T16:23:06.386836Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [5:8:2055] 2025-10-24T16:23:06.387279Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [5:8:2055] 2025-10-24T16:23:06.393470Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [5:8:2055] 2025-10-24T16:23:06.393938Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [5:8:2055] 2025-10-24T16:23:06.400451Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [5:8:2055] 2025-10-24T16:23:06.400894Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [5:8:2055] 2025-10-24T16:23:06.424246Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [5:8:2055] Initiator [5:7:2054] TEST 2 1 duration 0.325567s ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:42.123585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:42.123603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:42.123608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:42.123615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:42.123621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:42.123626Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:42.123633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:42.123662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:42.123754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:42.123802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:42.145375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:42.145403Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:42.145497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:42.148230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:42.148309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:42.148351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:42.150696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:42.150814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:42.150908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.150967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:42.151691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:42.151734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:42.151957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:42.151968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:42.151991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:42.151998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:42.152004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:42.152026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:42.153260Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:42.170207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:42.170264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.170313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:42.170320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:42.170362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:42.170375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:42.170901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.170944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:42.170988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.171004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:42.171009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:42.171013Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:42.171355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.171366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:42.171371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:42.171661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.171671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.171676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:42.171682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:42.172300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:42.172633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:42.172750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:42.172941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.172963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
Id: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367312Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367318Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 5, at schemeshard: 72057594046678944, txId: 1003 2025-10-24T16:23:06.367324Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-10-24T16:23:06.367330Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:23:06.367425Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367435Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367440Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1003 2025-10-24T16:23:06.367444Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-10-24T16:23:06.367448Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:23:06.367659Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367672Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 6 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367676Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1003 2025-10-24T16:23:06.367681Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 6 2025-10-24T16:23:06.367686Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:23:06.367900Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 5 
PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367919Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367923Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-10-24T16:23:06.367931Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 5 2025-10-24T16:23:06.367936Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-10-24T16:23:06.367991Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.367998Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.368001Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-10-24T16:23:06.368003Z node 61 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 2 2025-10-24T16:23:06.368006Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-10-24T16:23:06.368011Z node 61 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-10-24T16:23:06.368460Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.368476Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.368485Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.368492Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:23:06.368677Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-10-24T16:23:06.368719Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 
1003: send EvNotifyTxCompletion 2025-10-24T16:23:06.368724Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-10-24T16:23:06.368786Z node 61 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:23:06.368799Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:23:06.368802Z node 61 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [61:401:2380] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:23:06.368859Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Valid/x/y/z" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:06.368901Z node 61 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Valid/x/y/z" took 53us result status StatusSuccess 2025-10-24T16:23:06.368966Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Valid/x/y/z" PathDescription { Self { Name: "z" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } FileStoreDescription { Name: "z" PathId: 7 IndexTabletId: 72075186233409546 Config { Version: 1 FileSystemId: "Valid/x/y/z" FolderId: "folder" CloudId: "cloud" BlockSize: 4096 BlocksCount: 4096 ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-1" } ExplicitChannelProfiles { PoolKind: "pool-kind-2" } } Version: 1 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:06.369013Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Invalid" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:06.369031Z node 61 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Invalid" took 19us result status StatusPathDoesNotExist 2025-10-24T16:23:06.369051Z node 61 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Invalid\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Invalid" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTabletPipeTest::TestPipeConnectToHint >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> SystemView::QueryMetricsSimple [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-10-24T16:23:06.389308Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.389318Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.389323Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.389435Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.389608Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-10-24T16:23:06.389630Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.391438Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.391443Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.391447Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.391505Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-10-24T16:23:06.392023Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-10-24T16:23:06.392038Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392300Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392304Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392306Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.392372Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-10-24T16:23:06.392383Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392386Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392422Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-10-24T16:23:06.392968Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392973Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.392976Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.393027Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-10-24T16:23:06.393034Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.393037Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.393044Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-10-24T16:23:06.393268Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-10-24T16:23:06.393273Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-10-24T16:23:06.393275Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.404014Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.404210Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.445163Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-10-24T16:23:06.445376Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.445462Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-10-24T16:23:06.446531Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-10-24T16:23:06.446597Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.446603Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:06.446608Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:06.446612Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-10-24T16:23:06.446617Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-10-24T16:23:06.446621Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-10-24T16:23:06.446624Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-10-24T16:23:06.446627Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-10-24T16:23:06.446638Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-10-24T16:23:06.446643Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-10-24T16:23:06.446646Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-10-24T16:23:06.446650Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-10-24T16:23:06.446654Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-10-24T16:23:06.446658Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-10-24T16:23:06.446662Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-10-24T16:23:06.446666Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-10-24T16:23:06.446675Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-10-24T16:23:06.446679Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-10-24T16:23:06.446682Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-10-24T16:23:06.446685Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-10-24T16:23:06.446689Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-10-24T16:23:06.446692Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-10-24T16:23:06.446695Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-10-24T16:23:06.446698Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-10-24T16:23:06.446702Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-10-24T16:23:06.446705Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-10-24T16:23:06.446708Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-10-24T16:23:06.446712Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-10-24T16:23:06.446715Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-10-24T16:23:06.446718Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-10-24T16:23:06.446722Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-10-24T16:23:06.446726Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-10-24T16:23:06.446741Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-10-24T16:23:06.446744Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-10-24T16:23:06.446748Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-10-24T16:23:06.446752Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-10-24T16:23:06.446755Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-10-24T16:23:06.446758Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-10-24T16:23:06.446762Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-10-24T16:23:06.446765Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-10-24T16:23:06.446768Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-10-24T16:23:06.446772Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-10-24T16:23:06.446775Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-10-24T16:23:06.446780Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-10-24T16:23:06.446783Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-10-24T16:23:06.446786Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-10-24T16:23:06.446790Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-10-24T16:23:06.446793Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-10-24T16:23:06.446797Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-10-24T16:23:06.446800Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-10-24T16:23:06.446813Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-10-24T16:23:06.446931Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-10-24T16:23:06.446963Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-10-24T16:23:06.446967Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-10-24T16:23:06.446971Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-10-24T16:23:06.446974Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-10-24T16:23:06.446978Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-10-24T16:23:06.446981Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-10-24T16:23:06.446984Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-10-24T16:23:06.446987Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-10-24T16:23:06.446992Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-10-24T16:23:06.446995Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-10-24T16:23:06.446998Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-10-24T16:23:06.447001Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-10-24T16:23:06.447004Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-10-24T16:23:06.447008Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-10-24T16:23:06.447011Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-10-24T16:23:06.447014Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-10-24T16:23:06.447020Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-10-24T16:23:06.447023Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-10-24T16:23:06.447026Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-10-24T16:23:06.447030Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-10-24T16:23:06.447032Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-10-24T16:23:06.447035Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-10-24T16:23:06.447038Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-10-24T16:23:06.447042Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-10-24T16:23:06.447044Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-10-24T16:23:06.447047Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-10-24T16:23:06.447050Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-10-24T16:23:06.447054Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-10-24T16:23:06.447057Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-10-24T16:23:06.447060Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-10-24T16:23:06.447064Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-10-24T16:23:06.447067Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-10-24T16:23:06.447075Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-10-24T16:23:06.447078Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-10-24T16:23:06.447081Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-10-24T16:23:06.447084Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-10-24T16:23:06.447087Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-10-24T16:23:06.447090Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-10-24T16:23:06.447093Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-10-24T16:23:06.447096Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-10-24T16:23:06.447100Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-10-24T16:23:06.447104Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-10-24T16:23:06.447106Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-10-24T16:23:06.447109Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-10-24T16:23:06.447113Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-10-24T16:23:06.447116Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-10-24T16:23:06.447120Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-10-24T16:23:06.447123Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-10-24T16:23:06.447126Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-10-24T16:23:06.447129Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-10-24T16:23:06.447134Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-10-24T16:23:06.447173Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-10-24T16:23:06.447575Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.447579Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.447581Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.447636Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-10-24T16:23:06.447724Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.448122Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.448644Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.603975Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.604132Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-10-24T16:23:06.604159Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.604169Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-10-24T16:23:06.604193Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-10-24T16:23:06.811913Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-10-24T16:23:06.915938Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-10-24T16:23:06.915995Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-10-24T16:23:06.916047Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-10-24T16:23:06.916460Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.916466Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.916470Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.927979Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.940140Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.940214Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.940540Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:07.041158Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.041212Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-10-24T16:23:07.041228Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:07.041235Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-10-24T16:23:07.041256Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-10-24T16:23:07.041276Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-10-24T16:23:07.041326Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-10-24T16:23:07.041344Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-10-24T16:23:07.041371Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |53.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] |53.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> TResourceBroker::TestAutoTaskId [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries |53.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-10-24T16:22:58.175255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.380151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.380229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.380238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000977/r3tmp/tmpG2OKF7/pdisk_1.dat 2025-10-24T16:22:58.658174Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.659314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:58.659350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:58.659481Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322976646096 != 1761322976646099 2025-10-24T16:22:58.693446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:58.774629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.825489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:58.906823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:58.938789Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:685:2574] 2025-10-24T16:22:58.939061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.001383Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2576] 2025-10-24T16:22:59.001443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.002960Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.002994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.003162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:59.003171Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:59.003178Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:59.003231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.003253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.003265Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:717:2574] in generation 1 2025-10-24T16:22:59.003327Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.003343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.003446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:22:59.003453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:22:59.003459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:22:59.003485Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.003496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.003503Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2576] in generation 1 2025-10-24T16:22:59.013797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.037963Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:59.038044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.038071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-10-24T16:22:59.038076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:59.038080Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:59.038085Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.038117Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.038124Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:22:59.038132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.038138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-10-24T16:22:59.038141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:22:59.038144Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:22:59.038147Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
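The entries above show the datashard creating its change sender while still in the WaitScheme state and refusing to activate it ("Cannot activate change sender: ... state: WaitScheme"); activation only happens once the schema transaction completes and the shard reports "Change sender activated". A minimal self-contained sketch of that gating, using made-up type names rather than YDB's actual classes:

// Illustrative only: toy reconstruction of the activation gating seen in the
// trace, not YDB's real TDataShard code.
#include <iostream>

enum class EShardState { WaitScheme, Ready };

class TToyDataShard {
public:
    void CreateChangeSender() {
        SenderCreated = true;
        TryActivateChangeSender();
    }

    void SwitchToReady() {
        State = EShardState::Ready;
        TryActivateChangeSender();
    }

private:
    void TryActivateChangeSender() {
        if (!SenderCreated) {
            return;
        }
        if (State != EShardState::Ready) {
            // Mirrors "Cannot activate change sender: ..., state: WaitScheme"
            std::cout << "Cannot activate change sender: state WaitScheme\n";
            return;
        }
        if (!SenderActive) {
            SenderActive = true;
            std::cout << "Change sender activated\n";
        }
    }

    EShardState State = EShardState::WaitScheme;
    bool SenderCreated = false;
    bool SenderActive = false;
};

int main() {
    TToyDataShard shard;
    shard.CreateChangeSender();  // prints the "cannot activate" message
    shard.SwitchToReady();       // prints "Change sender activated"
}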
2025-10-24T16:22:59.038308Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:59.038338Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:59.038343Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:22:59.038351Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:22:59.038373Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.038378Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.038385Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:59.038390Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.038395Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:22:59.038398Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.038402Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:22:59.038405Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:22:59.038493Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:679:2570], serverId# [1:690:2577], sessionId# [0:0:0] 2025-10-24T16:22:59.038499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:680:2571], serverId# [1:692:2578], sessionId# [0:0:0] 2025-10-24T16:22:59.038521Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:59.038570Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:59.038585Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:59.038651Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-10-24T16:22:59.038668Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:22:59.039271Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-10-24T16:22:59.039556Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.039567Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:22:59.049975Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:59.050015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.050038Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-10-24T16:22:59.050046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.200530Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2608], serverId# [1:743:2611], sessionId# [0:0:0] 2025-10-24T16:22:59.200572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server conne ... ve planned 0 immediate 0 planned 0 2025-10-24T16:23:06.677714Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:06.677725Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:06.677732Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:06.677775Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:23:06.677801Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:06.677822Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-10-24T16:23:06.677827Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:06.677861Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:06.677877Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-10-24T16:23:06.677947Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:23:06.678017Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:06.678664Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-10-24T16:23:06.678683Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:23:06.679093Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:23:06.679108Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:06.679121Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:23:06.679129Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:23:06.679136Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-10-24T16:23:06.679154Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:06.679165Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:06.679176Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:06.679238Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:23:06.679243Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:06.679492Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:06.679501Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:06.679506Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:06.679515Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:06.679521Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:06.679529Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:06.679832Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:06.679866Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:06.680513Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:06.680562Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:06.680601Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-10-24T16:23:06.680610Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-10-24T16:23:06.680726Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:06.680734Z node 4 
:TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:06.682517Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.682540Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:797:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.682549Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.682708Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:803:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.682729Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.683537Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:06.684735Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:06.684759Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:06.731744Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:06.840912Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:06.840957Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:06.841459Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:06.877085Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:875:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:06.908388Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgfv5t1rhww41q35tcjp43, Database: , SessionId: ydb://session/3?node_id=4&id=NDcyN2I0MjgtNjMxMGU2NTItZjYzOWViYmQtNWY1NjVkZTM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:06.908956Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:950:2734], serverId# [4:951:2735], sessionId# [0:0:0] 2025-10-24T16:23:06.909064Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-10-24T16:23:06.909142Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322986909110 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:06.909169Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-10-24T16:23:06.920404Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-10-24T16:23:06.920429Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:06.921616Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:957:2740], serverId# [4:958:2741], sessionId# [0:0:0] 2025-10-24T16:23:06.922570Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:959:2742], serverId# [4:960:2743], sessionId# [0:0:0] >> BootstrapperTest::LoneBootstrapper >> CdcStreamChangeCollector::NewImage [GOOD] >> TFileStoreWithReboots::CreateAlterNoVersion [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries >> TabletState::NormalLifecycle >> Viewer::JsonAutocompleteStartOfDatabaseName >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] >> Viewer::TabletMerging >> Viewer::JsonAutocompleteSimilarDatabaseName >> Viewer::PDiskMerging |53.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |53.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |53.6%| 
[LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |53.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-10-24T16:22:58.756942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.816461Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.816540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.816549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00092c/r3tmp/tmpqv4nnU/pdisk_1.dat 2025-10-24T16:22:59.068434Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:59.070427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:59.070460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:59.070586Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322977339017 != 1761322977339020 2025-10-24T16:22:59.103022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:59.191751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:59.234621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:59.341037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:59.389633Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:685:2574] 2025-10-24T16:22:59.389682Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.411882Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2576] 2025-10-24T16:22:59.412652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.434941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.434986Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.435385Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:59.435393Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:59.435400Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:59.435453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.435476Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.435488Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:717:2574] in generation 1 2025-10-24T16:22:59.435796Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.435811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.444049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:22:59.444075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:22:59.444084Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:22:59.444144Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.444201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.444218Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2576] in generation 1 2025-10-24T16:22:59.457412Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.524097Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:59.524178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.524210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-10-24T16:22:59.524217Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:59.524222Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:59.524228Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.524264Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.524274Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:22:59.524285Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.524294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-10-24T16:22:59.524298Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:22:59.524302Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:22:59.524306Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
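The change-collector dumps in this section end with PersistChangeRecord followed by EnqueueChangeRecords entries carrying Order, PathId and BodySize. A rough self-contained sketch of that persist-then-enqueue pattern, with hypothetical names rather than YDB's real interfaces; the example values echo the Order/PathId/BodySize fields that appear later in this dump:

// Illustrative only: records are made durable first (as part of the write),
// then handed to the async sender in the same Order.
#include <cstdint>
#include <deque>
#include <iostream>
#include <vector>

struct TChangeRecord {
    uint64_t Order;     // monotonically increasing per shard
    uint64_t PathId;    // target index path
    uint32_t BodySize;  // serialized body size in bytes
};

class TToyChangeCollector {
public:
    // Called while executing the write: the record becomes durable first.
    void PersistChangeRecord(const TChangeRecord& rec) {
        Persisted.push_back(rec);
    }

    // Called after the write completes: hand the persisted batch to the sender.
    void EnqueueChangeRecords() {
        for (const auto& rec : Persisted) {
            SendQueue.push_back(rec);
            std::cout << "Enqueued record Order=" << rec.Order
                      << " PathId=" << rec.PathId
                      << " BodySize=" << rec.BodySize << "\n";
        }
        Persisted.clear();
    }

private:
    std::vector<TChangeRecord> Persisted;
    std::deque<TChangeRecord> SendQueue;
};

int main() {
    TToyChangeCollector collector;
    // One write producing two async-index records, as in the trace.
    collector.PersistChangeRecord({1, 3, 38});
    collector.PersistChangeRecord({2, 5, 42});
    collector.EnqueueChangeRecords();
}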
2025-10-24T16:22:59.524465Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:59.524493Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:59.524501Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:22:59.524511Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:22:59.524534Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.524541Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.524550Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:59.524555Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.524562Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:22:59.524566Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.524570Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:22:59.524575Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:22:59.524675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:679:2570], serverId# [1:690:2577], sessionId# [0:0:0] 2025-10-24T16:22:59.524684Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:680:2571], serverId# [1:692:2578], sessionId# [0:0:0] 2025-10-24T16:22:59.524708Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:59.524759Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:59.524778Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:59.524860Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-10-24T16:22:59.524886Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:22:59.524898Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-10-24T16:22:59.525257Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.525275Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:22:59.535920Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:59.535955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.535973Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-10-24T16:22:59.535980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.700691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2608], serverId# [1:743:2611], sessionId# [0:0:0] 2025-10-24T16:22:59.700731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server conne ... TxInFly 0 2025-10-24T16:23:06.659264Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:06.659417Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:23:06.659423Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:06.659539Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:06.659545Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:06.659548Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:06.659557Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:06.659563Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:06.659569Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:06.659579Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-10-24T16:23:06.659582Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:23:06.659725Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-10-24T16:23:06.659733Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-10-24T16:23:06.660214Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:06.660231Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:06.660240Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain 
[OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:23:06.660267Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-10-24T16:23:06.660272Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-10-24T16:23:06.660277Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2025-10-24T16:23:06.660287Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:06.660293Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:06.660301Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:23:06.660886Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:06.660952Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-10-24T16:23:06.660958Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-10-24T16:23:06.660982Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:06.661070Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:06.661096Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:06.661101Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:06.661285Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-10-24T16:23:06.661292Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-10-24T16:23:06.662869Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:836:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.662889Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:846:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.662897Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.663071Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:851:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.663095Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:06.663877Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:06.664868Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:06.664890Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:06.664898Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:23:06.709956Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:06.818797Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:06.818836Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:06.818849Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:23:06.819352Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:850:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:06.856069Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:924:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:06.869805Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgfv56efhwmabbpsh1wda0, Database: , SessionId: ydb://session/3?node_id=4&id=YjU2MGUwOTUtZmJmYzM2Yi1kZTAyMWY4Ni1mOGUwY2ZkYQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:06.870410Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1029:2779], serverId# [4:1030:2780], sessionId# [0:0:0] 2025-10-24T16:23:06.870538Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-10-24T16:23:06.870617Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322986870584 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:06.870642Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1761322986870584 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:06.870661Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-10-24T16:23:06.883647Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-10-24T16:23:06.883673Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:06.884764Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1036:2785], serverId# [4:1037:2786], sessionId# [0:0:0] 2025-10-24T16:23:06.885693Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1038:2787], serverId# [4:1039:2788], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] Test command err: 2025-10-24T16:23:07.532841Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:07.532978Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=5 resources={400, 
400} 2025-10-24T16:23:07.532987Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.532998Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:07.533005Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533018Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 800.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:07.533028Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533033Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533038Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533049Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533054Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533060Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-4 (4 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533064Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533068Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533075Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533079Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533083Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533092Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-6 (6 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533097Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533101Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533107Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533111Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533116Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough 
resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533122Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-8 (8 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533127Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-8 (8 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533134Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533140Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-9 (9 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533144Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-9 (9 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533148Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533155Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-10 (10 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533159Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-10 (10 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533163Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533170Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-11 (11 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533174Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-11 (11 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533178Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533184Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-12 (12 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533189Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-12 (12 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533196Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533205Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-13 (13 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533209Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-13 (13 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533213Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533219Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-14 (14 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533224Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-14 (14 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533228Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533234Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task 
task-15 (15 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533238Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-15 (15 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533242Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533249Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-16 (16 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533253Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-16 (16 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533257Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533265Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-17 (17 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533269Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-17 (17 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533273Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533280Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-18 (18 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533284Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-18 (18 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533289Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533295Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-19 (19 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533299Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-19 (19 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533304Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533310Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-20 (20 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533314Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-20 (20 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533318Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533325Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-21 (21 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533329Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-21 (21 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533333Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533339Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-22 (22 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533343Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning 
waiting task task-22 (22 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533348Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533355Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-23 (23 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533359Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-23 (23 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.533363Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.533370Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-24 (24 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.533374Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-24 (24 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.5 ... s to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534278Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-83 (83 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534282Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-83 (83 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534300Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534307Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-84 (84 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534311Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-84 (84 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534315Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534322Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-85 (85 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534326Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-85 (85 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534330Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534336Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-86 (86 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534340Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-86 (86 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534345Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534355Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-87 (87 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534359Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-87 (87 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534364Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 
2025-10-24T16:23:07.534370Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-88 (88 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534374Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-88 (88 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534378Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534385Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-89 (89 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534400Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-89 (89 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534405Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534411Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-90 (90 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534415Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-90 (90 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534420Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534426Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-91 (91 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534430Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-91 (91 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534434Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534442Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-92 (92 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534446Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-92 (92 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534451Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534457Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-93 (93 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534461Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-93 (93 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534465Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534472Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-94 (94 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534476Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-94 (94 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534480Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534486Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-95 (95 by [1:105:2138]) priority=5 
resources={400, 400} 2025-10-24T16:23:07.534491Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-95 (95 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534495Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534501Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-96 (96 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534506Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-96 (96 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534510Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534516Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-97 (97 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534520Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-97 (97 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534525Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534532Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-98 (98 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534539Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-98 (98 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534543Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534549Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-99 (99 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534553Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-99 (99 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534558Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534564Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-100 (100 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:07.534570Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-100 (100 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:07.534574Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534588Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-0 (0 by [1:105:2138]) priority=5 resources={100, 100} 2025-10-24T16:23:07.534594Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:404: Use ID 101 for submitted task 2025-10-24T16:23:07.534599Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-0 (0 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:07.534604Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-0 (0 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:07.534608Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-0 (0 by [1:105:2138]) to queue queue_compaction1 
2025-10-24T16:23:07.534614Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-0 (0 by [1:105:2138])) 2025-10-24T16:23:07.534619Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:07.534629Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-102 (102 by [1:105:2138]) priority=5 resources={100, 100} 2025-10-24T16:23:07.534634Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-102 (102 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:07.534638Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-102 (102 by [1:105:2138]) 2025-10-24T16:23:07.534642Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:07.534651Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-0 (0 by [1:105:2138]) (release resources {100, 100}) 2025-10-24T16:23:07.534658Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 200.000000 to 10.000000 (remove task task-0 (0 by [1:105:2138])) 2025-10-24T16:23:07.534664Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 10.000000 2025-10-24T16:23:07.534671Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:663: Updated real resource usage for queue queue_compaction0 from 0.000000 to 80.000000 (in-fly consumption {400, 400}) 2025-10-24T16:23:07.534675Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {100, 100} for task task-102 (102 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:07.534680Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-102 (102 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:07.534685Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 10.000000 to 200.500000 (insert task task-102 (102 by [1:105:2138])) 2025-10-24T16:23:07.534691Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-10-24T16:23:06.990990Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.990999Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.991002Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.000037Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-10-24T16:23:07.000054Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.000057Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.000093Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008457s 2025-10-24T16:23:07.002246Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.023931Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-10-24T16:23:07.023984Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.028335Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.028342Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.028346Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.031934Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-10-24T16:23:07.031953Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.031956Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.031978Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007310s 2025-10-24T16:23:07.036153Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.050013Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-10-24T16:23:07.050057Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052332Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052339Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052343Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.061702Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-10-24T16:23:07.061720Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.061726Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.061746Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.229524s 2025-10-24T16:23:07.063909Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.079909Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-10-24T16:23:07.079951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.105526Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.105533Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.105537Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.111937Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-10-24T16:23:07.111956Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.111962Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.111981Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.176504s 2025-10-24T16:23:07.115960Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.128013Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-10-24T16:23:07.128055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.140144Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.140151Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.140155Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.151909Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.153396Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:07.190240Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.191951Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-10-24T16:23:07.191964Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.191968Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.191974Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.273152s 2025-10-24T16:23:07.192051Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-10-24T16:23:07.235697Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.235707Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.235711Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.235784Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.250062Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:07.250135Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.252216Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:07.371914Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.372004Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-10-24T16:23:07.372022Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:07.372032Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-10-24T16:23:07.372048Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-10-24T16:23:07.475951Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-10-24T16:23:07.476018Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-10-24T16:23:07.476384Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.476389Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.476392Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.488083Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:07.494346Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:07.494438Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.494794Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:07.596119Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.596173Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-10-24T16:23:07.596187Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:07.596194Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-10-24T16:23:07.596212Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-10-24T16:23:07.596228Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-10-24T16:23:07.596280Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-10-24T16:23:07.596288Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-10-24T16:23:07.599275Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/sys_view/ut/unittest >> SystemView::QueryMetricsSimple [GOOD] Test command err: 2025-10-24T16:20:52.197368Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824047502707979:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:52.197385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:52.201027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001661/r3tmp/tmphE2INr/pdisk_1.dat 2025-10-24T16:20:52.240416Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21072, node 1 2025-10-24T16:20:52.257729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:20:52.257740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:20:52.257741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:20:52.257784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:20:52.292712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: 2025-10-24T16:20:52.299080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:52.299114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:52.304686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:20:52.311008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) waiting... 2025-10-24T16:20:52.319541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:52.319566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:52.320988Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-10-24T16:20:52.321155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:52.321168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-10-24T16:20:52.322554Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:20:52.372156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:52.372278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:52.372303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:52.375837Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant1/.metadata/script_executions 2025-10-24T16:20:52.375888Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant1/.metadata/script_executions 2025-10-24T16:20:52.387103Z node 4 :SYSTEM_VIEWS DEBUG: partition_stats.cpp:32: NSysView::TPartitionStatsCollector bootstrapped 2025-10-24T16:20:52.387187Z node 4 :SYSTEM_VIEWS INFO: processor_impl.cpp:41: [72075186224037893] OnActivateExecutor 2025-10-24T16:20:52.387198Z node 4 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:15: [72075186224037893] TTxInitSchema::Execute 2025-10-24T16:20:52.387869Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:52.388598Z node 4 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:42: [72075186224037893] TTxInitSchema::Complete 2025-10-24T16:20:52.388608Z node 4 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:45: [72075186224037893] tablet is offline 2025-10-24T16:20:52.388871Z node 4 :SYSTEM_VIEWS DEBUG: tx_configure.cpp:20: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2025-10-24T16:20:52.389733Z node 4 :SYSTEM_VIEWS INFO: partition_stats.cpp:522: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-10-24T16:20:52.389924Z node 4 :SYSTEM_VIEWS DEBUG: tx_configure.cpp:30: [72075186224037893] TTxConfigure::Complete 2025-10-24T16:20:52.394081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) waiting... 
2025-10-24T16:20:52.403968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:52.403989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:52.405100Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7564824050713708420:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:52.405188Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:52.405288Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-10-24T16:20:52.405462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:52.406375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:52.406408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:52.407963Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant2/.metadata/script_executions 2025-10-24T16:20:52.408089Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:20:52.408353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:52.410833Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824048282468721:2218];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:52.410921Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:52.413125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Tenant2/.metadata/script_executions waiting... 
2025-10-24T16:20:52.433126Z node 2 :SYSTEM_VIEWS INFO: processor_impl.cpp:41: [72075186224037899] OnActivateExecutor 2025-10-24T16:20:52.433141Z node 2 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:15: [72075186224037899] TTxInitSchema::Execute 2025-10-24T16:20:52.433775Z node 2 :SYSTEM_VIEWS DEBUG: partition_stats.cpp:32: NSysView::TPartitionStatsCollector bootstrapped 2025-10-24T16:20:52.433905Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:52.434434Z node 2 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:42: [72075186224037899] TTxInitSchema::Complete 2025-10-24T16:20:52.434444Z node 2 :SYSTEM_VIEWS DEBUG: tx_init_schema.cpp:45: [72075186224037899] tablet is offline 2025-10-24T16:20:52.434788Z node 2 :SYSTEM_VIEWS DEBUG: tx_configure.cpp:20: [72075186224037899] TTxConfigure::Execute: database# /Root/Tenant2 2025-10-24T16:20:52.435937Z node 2 :SYSTEM_VIEWS INFO: partition_stats.cpp:522: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 3], sysview processor id# 72075186224037899 2025-10-24T16:20:52.435956Z node 2 :SYSTEM_VIEWS DEBUG: tx_configure.cpp:30: [72075186224037899] TTxConfigure::Complete 2025-10-24T16:20:52.709208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:52.731685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:756482404 ... processor ids count# 1, processor id to database count# 1 2025-10-24T16:23:05.004133Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:594: Handle TEvPrivate::TEvProcessInterval: service id# [83:7564824576133788221:2063], interval end# 2025-10-24T16:23:05.000000Z, event interval end# 2025-10-24T16:23:05.000000Z 2025-10-24T16:23:05.004149Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [83:7564824576133788221:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-10-24T16:23:05.007891Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:594: Handle TEvPrivate::TEvProcessInterval: service id# [83:7564824576133788301:2078], interval end# 2025-10-24T16:23:05.000000Z, event interval end# 2025-10-24T16:23:05.000000Z 2025-10-24T16:23:05.007903Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [83:7564824576133788301:2078], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-10-24T16:23:05.007997Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:594: Handle TEvPrivate::TEvProcessInterval: service id# [81:7564824579063230787:2072], interval end# 2025-10-24T16:23:05.000000Z, event interval end# 2025-10-24T16:23:05.000000Z 2025-10-24T16:23:05.008015Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [81:7564824579063230787:2072], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-10-24T16:23:05.008065Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:594: Handle TEvPrivate::TEvProcessInterval: service id# [81:7564824579063230868:2079], interval end# 2025-10-24T16:23:05.000000Z, event interval end# 2025-10-24T16:23:05.000000Z 2025-10-24T16:23:05.008072Z node 81 :SYSTEM_VIEWS 
DEBUG: sysview_service.cpp:408: Rotate logs: service id# [81:7564824579063230868:2079], query logs count# 1, processor ids count# 1, processor id to database count# 1 2025-10-24T16:23:05.028178Z node 82 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [82:7564824577091565488:2063] 2025-10-24T16:23:05.031908Z node 83 :SYSTEM_VIEWS DEBUG: tx_collect.cpp:14: [72075186224037893] TTxCollect::Execute 2025-10-24T16:23:05.031933Z node 83 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:182: [72075186224037893] PersistPartitionTopResults: table id# 17, partition interval end# 2025-10-24T16:23:05.000000Z, partition count# 0 2025-10-24T16:23:05.031937Z node 83 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:182: [72075186224037893] PersistPartitionTopResults: table id# 20, partition interval end# 2025-10-24T16:23:05.000000Z, partition count# 0 2025-10-24T16:23:05.031940Z node 83 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:182: [72075186224037893] PersistPartitionTopResults: table id# 18, partition interval end# 2025-10-24T17:00:00.000000Z, partition count# 0 2025-10-24T16:23:05.031943Z node 83 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:182: [72075186224037893] PersistPartitionTopResults: table id# 21, partition interval end# 2025-10-24T17:00:00.000000Z, partition count# 0 2025-10-24T16:23:05.032046Z node 83 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:333: [72075186224037893] Reset: interval end# 2025-10-24T16:23:05.000000Z 2025-10-24T16:23:05.053331Z node 82 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [82:7564824577091565564:2077] 2025-10-24T16:23:05.053673Z node 82 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:523: Send counters: service id# [82:7564824577091565564:2077], processor id# 72075186224037893, database# /Root/Tenant1, generation# 14368741197979772588, node id# 82, is retrying# 0, is labeled# 0 2025-10-24T16:23:05.054091Z node 83 :SYSTEM_VIEWS DEBUG: db_counters.cpp:449: [72075186224037893] TEvSendDbCountersRequest: node id# 82, generation# 14368741197979772588, services count# 1, request size# 278 2025-10-24T16:23:05.054295Z node 82 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:735: Handle TEvSysView::TEvSendDbCountersResponse: service id# [82:7564824577091565564:2077], database# /Root/Tenant1, generation# 14368741197979772588 2025-10-24T16:23:05.056424Z node 83 :SYSTEM_VIEWS DEBUG: tx_collect.cpp:29: [72075186224037893] TTxCollect::Complete 2025-10-24T16:23:05.296017Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [81:7564824579063230787:2072] 2025-10-24T16:23:05.439885Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [83:7564824576133788301:2078] 2025-10-24T16:23:05.448950Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [83:7564824576133788221:2063] 2025-10-24T16:23:05.619901Z node 82 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [82:7564824577091565564:2077] 2025-10-24T16:23:05.672016Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [81:7564824579063230868:2079] 2025-10-24T16:23:05.679815Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [83:7564824576133788301:2078] 2025-10-24T16:23:05.680822Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:523: Send counters: 
service id# [83:7564824576133788301:2078], processor id# 72075186224037893, database# /Root/Tenant1, generation# 8381512990642804728, node id# 83, is retrying# 0, is labeled# 0 2025-10-24T16:23:05.684233Z node 83 :SYSTEM_VIEWS DEBUG: db_counters.cpp:449: [72075186224037893] TEvSendDbCountersRequest: node id# 83, generation# 8381512990642804728, services count# 1, request size# 817 2025-10-24T16:23:05.684267Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:735: Handle TEvSysView::TEvSendDbCountersResponse: service id# [83:7564824576133788301:2078], database# /Root/Tenant1, generation# 8381512990642804728 2025-10-24T16:23:05.716650Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [81:7564824579063230868:2079] 2025-10-24T16:23:05.716770Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:523: Send counters: service id# [81:7564824579063230868:2079], processor id# 72075186224037893, database# /Root/Tenant1, generation# 15519701132783456570, node id# 81, is retrying# 0, is labeled# 0 2025-10-24T16:23:05.719970Z node 83 :SYSTEM_VIEWS DEBUG: db_counters.cpp:449: [72075186224037893] TEvSendDbCountersRequest: node id# 81, generation# 15519701132783456570, services count# 1, request size# 251 2025-10-24T16:23:05.720738Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:735: Handle TEvSysView::TEvSendDbCountersResponse: service id# [81:7564824579063230868:2079], database# /Root/Tenant1, generation# 15519701132783456570 2025-10-24T16:23:05.800014Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [81:7564824579063230787:2072] 2025-10-24T16:23:05.808073Z node 83 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:659: Handle TEvPrivate::TEvProcessCounters: service id# [83:7564824576133788221:2063] 2025-10-24T16:23:05.899725Z node 83 :SYSTEM_VIEWS DEBUG: db_counters.cpp:543: [72075186224037893] TEvApplyCounters: services count# 2 2025-10-24T16:23:05.907940Z node 83 :SYSTEM_VIEWS DEBUG: partition_stats.cpp:510: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-10-24T16:23:05.907912Z 2025-10-24T16:23:05.908058Z node 83 :SYSTEM_VIEWS DEBUG: tx_top_partitions.cpp:125: [72075186224037893] TTxTopPartitions::Execute: , partition by CPU count# 0, partition by TLI count# 0 2025-10-24T16:23:05.908077Z node 83 :SYSTEM_VIEWS DEBUG: tx_top_partitions.cpp:137: [72075186224037893] TTxTopPartitions::Complete 2025-10-24T16:23:05.967925Z node 82 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:670: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [82:7564824577091565488:2063] 2025-10-24T16:23:06.256035Z node 81 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:389: Send interval summary: service id# [81:7564824579063230868:2079], processor id# 72075186224037893, database# /Root/Tenant1, interval end# 2025-10-24T16:23:05.000000Z, query count# 1 2025-10-24T16:23:06.256778Z node 83 :SYSTEM_VIEWS DEBUG: tx_interval_summary.cpp:166: [72075186224037893] TTxIntervalSummary::Execute: node id# 81, query count# 1 2025-10-24T16:23:06.261672Z node 83 :SYSTEM_VIEWS DEBUG: tx_interval_summary.cpp:194: [72075186224037893] TTxIntervalSummary::Complete 2025-10-24T16:23:06.679988Z node 81 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [81:7564824626307872568:2388], owner: [81:7564824626307872565:2386], scan id: 0, sys view info: Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:23:06.684235Z node 81 :SYSTEM_VIEWS 
INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [81:7564824626307872568:2388], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-10-24T16:23:06.696065Z node 83 :SYSTEM_VIEWS DEBUG: processor_impl.cpp:641: [72075186224037893] Reply batch: range# From { IntervalEndUs: 0 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 18446744073709551615 Rank: 4294967295 } InclusiveTo: true Type: METRICS_ONE_MINUTE , rows# 2, bytes# 435, next# 2025-10-24T16:23:06.700449Z node 81 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [81:7564824626307872568:2388], row count: 2, finished: 1 2025-10-24T16:23:06.700494Z node 81 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [81:7564824626307872568:2388], owner: [81:7564824626307872565:2386], scan id: 0, sys view info: Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72075186224037888 LocalId: 1 } 2025-10-24T16:23:06.701471Z node 81 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322986678, txId: 281474976715668] shutting down 2025-10-24T16:23:06.709576Z node 81 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 82 2025-10-24T16:23:06.709712Z node 81 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [81:7564824579063230868:2079], processor id# 72075186224037893, database# /Root/Tenant1 2025-10-24T16:23:06.709720Z node 81 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(82, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:23:06.709761Z node 81 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 83 2025-10-24T16:23:06.709765Z node 81 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [81:7564824579063230868:2079], database# /Root/Tenant1, processor id# 72075186224037893 2025-10-24T16:23:06.709864Z node 81 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(83, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:23:06.715084Z node 83 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [83:7564824576133788301:2078], processor id# 72075186224037893, database# /Root/Tenant1 2025-10-24T16:23:06.715213Z node 83 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [83:7564824576133788301:2078], database# /Root/Tenant1, processor id# 72075186224037893 2025-10-24T16:23:06.715383Z node 82 :SYSTEM_VIEWS WARN: sysview_service.cpp:812: Summary delivery problem: service id# [82:7564824577091565564:2077], processor id# 72075186224037893, database# /Root/Tenant1 2025-10-24T16:23:06.716227Z node 82 :SYSTEM_VIEWS INFO: sysview_service.cpp:881: Navigate by database succeeded: service id# [82:7564824577091565564:2077], database# /Root/Tenant1, processor id# 72075186224037893 >> Viewer::TabletMergingPacked >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] >> TabletState::NormalLifecycle [GOOD] >> TPipeCacheTest::TestIdleRefresh ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:58.576215Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: 
Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpXWhsFM/dummy/1/gen_2/changelog.json 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpXWhsFM/dummy/1/gen_2/snapshot ...restarting tablet 00000.009 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpXWhsFM/dummy/1/gen_3/changelog.json 00000.009 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpXWhsFM/dummy/1/gen_3/snapshot ...restarting tablet again 00000.010 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpXWhsFM/dummy/1/gen_4/changelog.json 00000.010 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpXWhsFM/dummy/1/gen_4/snapshot 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {186b, 6} 00000.010 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:58.588562Z ...starting tablet 00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.003 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpwSZaPs/dummy/1/gen_2/changelog.json 00000.003 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpwSZaPs/dummy/1/gen_2/snapshot 00000.072 C1| TABLET_EXECUTOR: Tablet 1 unhandled exception NKikimr::NUtil::TTabletError: ydb/core/tablet_flat/flat_executor.cpp:5161: Backup changelog failed: Failed to create changelog file /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpwSZaPs/dummy/1/gen_2/changelog.json: (Error 13: Permission denied) util/folder/path.cpp:424: could not create directory /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpwSZaPs/dummy/1 ??+0 (0x10034BBF) ??+0 (0x10034B67) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&)+260 (0x127AFDE4) ??+0 (0x12781187) NActors::IActor::Receive(TAutoPtr&)+216 (0x108B45B8) ??+0 (0x10034BBF) ??+0 (0x10034D94) std::rethrow_exception(std::exception_ptr)+12 (0x1004610C) NActors::IActorExceptionHandler::OnUnhandledException(std::exception_ptr const&)+33 (0xF9E6D51) ...waiting tablet death 00000.072 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.072 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit 
{0 0b} miss {0 0b} 00000.072 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.072 II| FAKE_ENV: DS.0 gone, left {62b, 2}, put {62b, 2} 00000.072 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.072 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.072 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.072 II| FAKE_ENV: All BS storage groups are stopped 00000.072 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.072 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 17}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:58.661647Z ...starting tablet 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.001 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpAZ1HrQ/dummy/1/gen_2/changelog.json 00000.001 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpAZ1HrQ/dummy/1/gen_2/snapshot ...initing schema 00000.002 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.020 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpAZ1HrQ/dummy/1/gen_3/changelog.json 00000.021 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpAZ1HrQ/dummy/1/gen_3/snapshot 00000.022 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.022 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.053 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.053 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.053 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {219b, 7} 00000.053 II| FAKE_ENV: DS.1 gone, left {293b, 2}, put {328b, 3} 00000.053 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.053 II| FAKE_ENV: All BS storage groups are stopped 00000.053 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.053 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 22}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:58.717116Z ...starting tablet 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpGkSDJL/dummy/1/gen_2/changelog.json 00000.002 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpGkSDJL/dummy/1/gen_2/snapshot ...initing schema 00000.002 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two 
columns simultaneously 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...erasing row 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...replacing row 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing different values in one column 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.003 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing composite primary key 00000.004 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.020 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpGkSDJL/dummy/1/gen_3/changelog.json 00000.020 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpGkSDJL/dummy/1/gen_3/snapshot 00000.021 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.021 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.063 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.064 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.064 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.064 II| FAKE_ENV: DS.1 gone, left {1217b, 12}, put {1252b, 13} 00000.064 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.064 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.064 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {909b, 17} 00000.064 II| FAKE_ENV: All BS storage groups are stopped 00000.064 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.064 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 32}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:21:58.783045Z ...starting tablet 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.001 II| FAKE_ENV: Starting storage for BS group 1 00000.001 II| FAKE_ENV: Starting storage for BS group 2 00000.001 II| FAKE_ENV: Starting storage for BS group 3 00000.002 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpNt99jj/dummy/1/gen_2/changelog.json 00000.002 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpNt99jj/dummy/1/gen_2/snapshot ...initing schema 00000.002 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing large data 00002.458 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00005.616 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpNt99jj/dummy/1/gen_3/changelog.json 00005.616 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/fxx3/00150e/r3tmp/tmpNt99jj/dummy/1/gen_3/snapshot 00005.670 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.690 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.708 DD| LOCAL_DB_BACKUP: Handle 
NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.729 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.750 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.770 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.790 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.808 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.828 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.849 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.870 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.892 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.913 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.933 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00005.947 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBack ... 65} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84700 closedDataSize: 8822453 openedRowCount: 100 openedDataSize: 10550 openedSortedByRowCount: 848 openedSortedByDataSize: 848 FutureEvents: 305 currentKeyPointer: IsBegin: 0 Part: [0:0:848:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10550 BeginKey: {282451, 94158} EndKey: {282772, 94265} State: 1 processing event IsBegin: 0 Part: [0:0:848:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10550 BeginKey: {282451, 94158} EndKey: {282772, 94265} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 848 openedSortedByDataSize: 848 FutureEvents: 304 currentKeyPointer: IsBegin: 0 Part: [0:0:848:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10550 BeginKey: {282451, 94158} EndKey: {282772, 94265} State: 2 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 848 openedSortedByDataSize: 848 FutureEvents: 304 currentKeyPointer: IsBegin: 1 Part: [0:0:849:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 0 processing event IsBegin: 1 Part: [0:0:849:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 0 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 100 openedDataSize: 10111 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 303 currentKeyPointer: IsBegin: 1 Part: [0:0:849:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 1 iterating stats.RowCountHistogram: 8 
stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84800 closedDataSize: 8833003 openedRowCount: 100 openedDataSize: 10111 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 303 currentKeyPointer: IsBegin: 0 Part: [0:0:849:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 1 processing event IsBegin: 0 Part: [0:0:849:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 302 currentKeyPointer: IsBegin: 0 Part: [0:0:849:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10111 BeginKey: {282775, 94266} EndKey: {283117, 94380} State: 2 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 849 openedSortedByDataSize: 849 FutureEvents: 302 currentKeyPointer: IsBegin: 1 Part: [0:0:850:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 0 processing event IsBegin: 1 Part: [0:0:850:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 0 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 100 openedDataSize: 10583 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 301 currentKeyPointer: IsBegin: 1 Part: [0:0:850:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 84900 closedDataSize: 8843114 openedRowCount: 100 openedDataSize: 10583 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 301 currentKeyPointer: IsBegin: 0 Part: [0:0:850:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 1 processing event IsBegin: 0 Part: [0:0:850:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 8 nextHistogramRowCount: 90000 nextHistogramDataSize: 9370665 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 300 currentKeyPointer: IsBegin: 0 Part: [0:0:850:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10583 BeginKey: {283123, 94382} EndKey: {283444, 94489} State: 2 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 
18446744073709551615 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 850 openedSortedByDataSize: 850 FutureEvents: 300 currentKeyPointer: IsBegin: 1 Part: [0:0:851:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 0 processing event IsBegin: 1 Part: [0:0:851:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 0 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 100 openedDataSize: 10456 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 299 currentKeyPointer: IsBegin: 1 Part: [0:0:851:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 1 iterating stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85000 closedDataSize: 8853697 openedRowCount: 100 openedDataSize: 10456 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 299 currentKeyPointer: IsBegin: 0 Part: [0:0:851:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 1 processing event IsBegin: 0 Part: [0:0:851:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 1 checking stats.RowCountHistogram: 8 stats.DataSizeHistogram: 9 nextHistogramRowCount: 90000 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85100 closedDataSize: 8864153 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 298 currentKeyPointer: IsBegin: 0 Part: [0:0:851:0:0:0:0] PageId: 147 Level: 4 BeginRowId: 0 EndRowId: 100 BeginDataSize: 0 EndDataSize: 10456 BeginKey: {283447, 94490} EndKey: {283771, 94598} State: 2 finished stats.RowCountHistogram: 9 stats.DataSizeHistogram: 9 nextHistogramRowCount: 18446744073709551615 nextHistogramDataSize: 18446744073709551615 closedRowCount: 85100 closedDataSize: 8864153 openedRowCount: 0 openedDataSize: 0 openedSortedByRowCount: 851 openedSortedByDataSize: 851 FutureEvents: 298 Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 
(actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-10-24T16:22:58.435173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.476116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.476203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.476215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000964/r3tmp/tmpxO8fDB/pdisk_1.dat 2025-10-24T16:22:58.606848Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.607939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:58.607973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:58.608095Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322976688690 != 1761322976688693 2025-10-24T16:22:58.639787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:58.711942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.777475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:58.857642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:58.873426Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:685:2574] 2025-10-24T16:22:58.873484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:58.917484Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2576] 2025-10-24T16:22:58.917540Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:58.922100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:58.922135Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:58.922286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:58.922294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:58.922304Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:58.922364Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:58.922388Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:58.922400Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:717:2574] in generation 1 2025-10-24T16:22:58.922459Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:58.922473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:58.922567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:22:58.922574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:22:58.922580Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:22:58.922601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:58.922611Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:58.922617Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2576] in generation 1 2025-10-24T16:22:58.936065Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:58.962620Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:58.962689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:58.962717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-10-24T16:22:58.962723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:58.962729Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:58.962735Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.962763Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:58.962770Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:22:58.962778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:58.962785Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-10-24T16:22:58.962789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:22:58.962792Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:22:58.962795Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-10-24T16:22:58.962919Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:58.962938Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:58.962944Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:22:58.962952Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:22:58.962973Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:58.962980Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.962988Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:58.962993Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:58.963000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:22:58.963004Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.963008Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:22:58.963012Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:22:58.963105Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:679:2570], serverId# [1:690:2577], sessionId# [0:0:0] 2025-10-24T16:22:58.963120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:680:2571], serverId# [1:692:2578], sessionId# [0:0:0] 2025-10-24T16:22:58.963152Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:58.963227Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:58.963248Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:58.963349Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-10-24T16:22:58.963425Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:22:58.963440Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-10-24T16:22:58.963777Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:58.963790Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:22:58.976070Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:58.976104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:58.976126Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-10-24T16:22:58.976133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.132392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2608], serverId# [1:743:2611], sessionId# [0:0:0] 2025-10-24T16:22:59.132434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server conne ... :23:07.205527Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:07.205534Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:07.205546Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:23:07.206287Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:07.206360Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-10-24T16:23:07.206370Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-10-24T16:23:07.206413Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:07.206523Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:07.206555Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:07.206561Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:07.206766Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-10-24T16:23:07.206775Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-10-24T16:23:07.208896Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:836:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.208918Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:846:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.208929Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.209110Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:851:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.209131Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.210020Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:07.211145Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.211169Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:07.211180Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:23:07.264394Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:07.373714Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.373758Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:23:07.373772Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:23:07.374356Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:850:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:07.417160Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:924:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.456498Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgfvp809nnr6hq5mga65xk, Database: , SessionId: ydb://session/3?node_id=4&id=NzVmMDM4MGQtYjcwMjUyOGUtZWJmMjdkNDAtNzI1YTJkNTI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:07.457265Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1035:2784], serverId# [4:1036:2785], sessionId# [0:0:0] 2025-10-24T16:23:07.457402Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-10-24T16:23:07.457501Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322987457460 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:07.457531Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1761322987457460 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:07.457549Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-10-24T16:23:07.468203Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-10-24T16:23:07.468239Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:07.504602Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfvyd1d31tzsnshmzk7be, Database: , SessionId: ydb://session/3?node_id=4&id=OGI2OWE1ODUtNDY2MDQ3OWQtNjVhODA3ZWYtZWQ2ZjYwZDI=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:23:07.505500Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037889 2025-10-24T16:23:07.505698Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1761322987505653 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:07.505738Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 4 Group: 1761322987505653 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:07.505753Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 1761322987505653 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:07.505767Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 6 Group: 1761322987505653 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-10-24T16:23:07.505790Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-10-24T16:23:07.518323Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-10-24T16:23:07.518354Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:23:07.519562Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1084:2824], serverId# [4:1085:2825], sessionId# [0:0:0] 2025-10-24T16:23:07.520802Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1086:2826], serverId# [4:1087:2827], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2025-10-24T16:22:57.924429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.117667Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.117745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.117753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000952/r3tmp/tmpC3s2ER/pdisk_1.dat 2025-10-24T16:22:58.430596Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.440409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:58.440455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:58.440604Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322976686012 != 1761322976686015 2025-10-24T16:22:58.476129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:58.574154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.629859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:58.712516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:58.746928Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:22:58.748827Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:58.771797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:58.771873Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:58.772062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:58.772073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:58.772082Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:58.772157Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:58.772185Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:58.772202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:22:58.784062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:58.800760Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:58.800852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:58.800882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:22:58.800888Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:58.800894Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:58.800899Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.801067Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:58.801093Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:58.801109Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:58.801117Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.801127Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:58.801132Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:58.801143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:22:58.801256Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:58.801321Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:58.801338Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:58.805840Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:58.816262Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:58.816306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:58.962670Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:22:58.963528Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:22:58.963544Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.963599Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:58.963607Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:22:58.963616Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:22:58.963675Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:22:58.963707Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:22:58.964017Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:58.964036Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:22:58.964408Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:22:58.964486Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.964810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:22:58.964818Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.964859Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:22:58.964869Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:58.965100Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:58.965109Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:58.965115Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:22:58.965131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:22:58.965147Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:22:58.965157Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.965877Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:58.967812Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:22:58.967988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:22:58.968000Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:22:58.969899Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186 ... unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-10-24T16:23:07.105526Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-10-24T16:23:07.105577Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.133771Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:07.193537Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:07.294867Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:07.294893Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.294965Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:07.294974Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:07.294985Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:07.295043Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-10-24T16:23:07.295075Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:07.295130Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:07.295323Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:07.330899Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 
1500} 2025-10-24T16:23:07.330932Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:07.330941Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:07.330952Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.330979Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:07.330993Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-10-24T16:23:07.331007Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.331549Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-10-24T16:23:07.331562Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:07.332884Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.332906Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.332931Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.333046Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.333119Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.333736Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:07.334950Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.520097Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.520597Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:23:07.545003Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.577301Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfvt4ajvh06qte76wd9sa, Database: , SessionId: ydb://session/3?node_id=4&id=MTYzY2YyNzgtM2FjYTk4Ni03NzU5OTZkNi1kY2U5YmY5NA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:07.577870Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:985:2772], serverId# [4:986:2773], sessionId# [0:0:0] 2025-10-24T16:23:07.577991Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-10-24T16:23:07.578064Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322987578032 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:07.578091Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-10-24T16:23:07.594146Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:07.594169Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.617277Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgfw2b6k6cm7rpfssvybcs, Database: , SessionId: ydb://session/3?node_id=4&id=YzFlNzczYTctYzk0ZWRlYTItNjUyNjM2YmUtYjk2YTU1Yjk=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:23:07.617823Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-10-24T16:23:07.617907Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1761322987617878 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:07.617937Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-10-24T16:23:07.632157Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:07.632178Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.653745Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgfw3hcvsscpj2fksa9agk, Database: , SessionId: ydb://session/3?node_id=4&id=YzkyYTNhOGMtOTY4MzQyNS01MGY0MjFjNy1mYmFmOGJmMA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:07.654578Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:5] at 72075186224037888 2025-10-24T16:23:07.654657Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1761322987654630 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:07.654684Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:5] at 72075186224037888, row count=1 2025-10-24T16:23:07.665099Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:07.665120Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.665740Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1032:2802], serverId# [4:1033:2803], sessionId# [0:0:0] 2025-10-24T16:23:07.666956Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1034:2804], serverId# [4:1035:2805], sessionId# [0:0:0] >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> Viewer::PDiskMerging [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> Viewer::SelectStringWithBase64Encoding >> BootstrapperTest::MultipleBootstrappers >> Viewer::JsonAutocompleteEmpty ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... 
waiting for client destroyed notification ... waiting for connect2 >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-10-24T16:22:58.640080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.772934Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.773026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.773037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000931/r3tmp/tmpxZ2IRE/pdisk_1.dat 2025-10-24T16:22:59.005907Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:59.006800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:59.006831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:59.006938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322977192035 != 1761322977192038 2025-10-24T16:22:59.038012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:59.119649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:59.159252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:59.263758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:59.328870Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:22:59.330627Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.394423Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.394462Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.394644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:59.394654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:59.394661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:59.394719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.394743Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.394757Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:22:59.408161Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.443225Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:59.443306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.443332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:22:59.443336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:59.443341Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:59.443345Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.443517Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:59.443539Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:59.443556Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.443561Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.443568Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:59.443573Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.443583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:22:59.443684Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:59.443731Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:59.443746Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:59.450586Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.461313Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:59.461357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.606705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:22:59.607946Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:22:59.607962Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.608019Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.608027Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:22:59.608037Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:22:59.608105Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:22:59.608131Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:22:59.610104Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.610122Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:22:59.619968Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:22:59.620083Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.620679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:22:59.620687Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.620729Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:22:59.620739Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.621035Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.621046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:59.621052Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:22:59.621067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:22:59.621083Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:22:59.621094Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.629203Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.630442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:22:59.630797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:22:59.630807Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:22:59.643199Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186 ... :810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:06.981902Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:06.981913Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:06.982342Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:06.982362Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:06.982399Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:06.989035Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:06.989083Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:23:06.990248Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-10-24T16:23:06.990265Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-10-24T16:23:06.990378Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.012701Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:07.100013Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:07.200852Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 
2025-10-24T16:23:07.200882Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.200969Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:07.200980Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:07.200992Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:07.201051Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-10-24T16:23:07.201085Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:07.201155Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:07.201982Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:07.236672Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-10-24T16:23:07.236706Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:07.236713Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:07.236724Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.236745Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:07.236757Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-10-24T16:23:07.236770Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.237255Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-10-24T16:23:07.237270Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:07.238711Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.238731Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.238754Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.238851Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.238908Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.239647Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:07.245252Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.427079Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.427547Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:23:07.451777Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.476418Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfvq6e0qndxt2fgxk9dx5, Database: , SessionId: ydb://session/3?node_id=4&id=OGUyZTBjODItY2Q0NjVkMTgtNTM1OTU0OTgtZTExNDFmODM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:07.477094Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:991:2777], serverId# [4:992:2778], sessionId# [0:0:0] 2025-10-24T16:23:07.477235Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-10-24T16:23:07.477329Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322987477289 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:07.477362Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-10-24T16:23:07.490974Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:07.490999Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.517834Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgfvz49pgrh1vzvrhqtkd3, Database: , SessionId: ydb://session/3?node_id=4&id=YzVmYjZkMDgtZmUzYmZiMmUtNTUxMWE4OTYtMzA2MmMzZWI=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:23:07.518440Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-10-24T16:23:07.518524Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1761322987518494 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:07.518552Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-10-24T16:23:07.531585Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:07.531617Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.532334Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1019:2796], serverId# [4:1020:2797], sessionId# [0:0:0] 2025-10-24T16:23:07.533395Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1021:2798], serverId# [4:1022:2799], sessionId# [0:0:0] |53.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |53.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlterNoVersion [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:37.056438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:37.056469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:37.056489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: 
StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:37.056498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:37.056506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:37.056511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:37.056522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:37.056543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:37.056692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:37.056757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:37.108228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:37.108277Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:37.108397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:37.120476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:37.120630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:37.120696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:37.123765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:37.123937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:37.124051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:37.124141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:37.124970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:37.125031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:37.125318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:37.125330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:37.125355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:37.125364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:37.125370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:37.125403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:37.133339Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:37.179181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:37.179280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:37.179359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:37.179369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:37.179420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:37.179439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:37.184641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:37.184726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:37.184810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-10-24T16:22:37.184835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:37.184843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:37.184850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:37.185475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:37.185490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:37.185496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:37.185879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:37.185891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:37.185899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:37.185907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:37.186744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:37.192270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:37.192371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:37.192675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:37.192724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
meshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:23:07.694028Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:89: TAlterFileStore::TConfigureParts operationId# 1002:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:07.694255Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2025-10-24T16:23:07.694270Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2025-10-24T16:23:07.694293Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-10-24T16:23:07.694310Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2025-10-24T16:23:07.694315Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:43: TAlterFileStore::TConfigureParts operationId# 1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2025-10-24T16:23:07.694320Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1002:0 3 -> 128 2025-10-24T16:23:07.694659Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:23:07.694681Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:23:07.694686Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:197: TAlterFileStore::TPropose operationId# 1002:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:07.694692Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2025-10-24T16:23:07.694715Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:07.694949Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2025-10-24T16:23:07.694968Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2025-10-24T16:23:07.695016Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:07.695033Z node 73 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 313532614767 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:07.695041Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:153: TAlterFileStore::TPropose operationId# 1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-10-24T16:23:07.695065Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-10-24T16:23:07.695069Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:23:07.695073Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-10-24T16:23:07.695075Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:23:07.695082Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:07.695089Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-10-24T16:23:07.695095Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:23:07.695099Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1002:0 2025-10-24T16:23:07.695103Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1002:0 2025-10-24T16:23:07.695120Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:23:07.695124Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2025-10-24T16:23:07.695127Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-10-24T16:23:07.695407Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:07.695415Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:07.695433Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:07.695437Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [73:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2025-10-24T16:23:07.695512Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:07.695520Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:07.695524Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:23:07.695530Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:07.695534Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:07.695546Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-10-24T16:23:07.695788Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-10-24T16:23:07.695860Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-10-24T16:23:07.695868Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-10-24T16:23:07.695973Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:23:07.695991Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-10-24T16:23:07.695996Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [73:418:2397] TestWaitNotification: OK eventTxId 1002 2025-10-24T16:23:07.696059Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:07.696081Z node 73 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 30us result status StatusSuccess 2025-10-24T16:23:07.696157Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-10-24T16:22:58.450341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.695154Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.695242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.695253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000944/r3tmp/tmpJ1o0Qg/pdisk_1.dat 2025-10-24T16:22:58.856255Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.857264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:58.857295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:58.857409Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322976840525 != 1761322976840528 2025-10-24T16:22:58.889099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:58.981234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:59.045700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:59.122924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:59.165109Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:685:2574] 2025-10-24T16:22:59.166557Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.187178Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2576] 2025-10-24T16:22:59.187232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:59.188653Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.188687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.188905Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:59.188915Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:59.188923Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:59.188979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.189008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.189022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:717:2574] in generation 1 2025-10-24T16:22:59.189088Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:59.189105Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:59.189212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:22:59.189219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:22:59.189225Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:22:59.189254Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:59.189269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:59.189277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2576] in generation 1 2025-10-24T16:22:59.202002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.220566Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:59.220648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.220677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-10-24T16:22:59.220683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:59.220687Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:59.220693Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.220730Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:59.220738Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:22:59.220747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:59.220755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-10-24T16:22:59.220758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:22:59.220761Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:22:59.220764Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-10-24T16:22:59.220911Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:59.220937Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:59.220942Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:22:59.220971Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:22:59.220996Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.221003Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.221066Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:59.221078Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.221088Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:22:59.221092Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.221095Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:22:59.221099Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:22:59.221219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:679:2570], serverId# [1:690:2577], sessionId# [0:0:0] 2025-10-24T16:22:59.221226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:680:2571], serverId# [1:692:2578], sessionId# [0:0:0] 2025-10-24T16:22:59.221251Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:59.221302Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:59.221676Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:59.221787Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-10-24T16:22:59.221815Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:22:59.221852Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-10-24T16:22:59.222234Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.222248Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:22:59.236221Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:59.236264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.236286Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-10-24T16:22:59.236293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.387767Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:740:2608], serverId# [1:743:2611], sessionId# [0:0:0] 2025-10-24T16:22:59.387820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server conne ... :810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:07.750129Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:07.750140Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.750456Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:07.750467Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:07.750489Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:07.760788Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:07.760829Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-10-24T16:23:07.760842Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-10-24T16:23:07.760847Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-10-24T16:23:07.760911Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:07.781876Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:07.842147Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:07.956173Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 
2025-10-24T16:23:07.956200Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:07.956273Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:07.956282Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:07.956290Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:07.956338Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-10-24T16:23:07.956363Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:07.956413Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:07.956576Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:08.000253Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-10-24T16:23:08.000283Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:08.000290Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:08.000301Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:08.000323Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:08.000335Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-10-24T16:23:08.000346Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:08.000765Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-10-24T16:23:08.000774Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:08.001991Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:08.002009Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:08.002030Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:08.002112Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:08.002158Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:08.003026Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:08.003895Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:08.190012Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:08.190503Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:23:08.214700Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:08.247583Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfwf1ft5ec3rxh2z7dzst, Database: , SessionId: ydb://session/3?node_id=4&id=ZjgwOWQ0YjYtOGE3ZTRlOTItNDk3ODI3ZGMtZTIwMTJiZQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:08.248133Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:985:2772], serverId# [4:986:2773], sessionId# [0:0:0] 2025-10-24T16:23:08.248253Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-10-24T16:23:08.248330Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322988248295 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:08.248358Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-10-24T16:23:08.268143Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:08.268169Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:08.307473Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgfwqdc3yggkhwq0ymskbf, Database: , SessionId: ydb://session/3?node_id=4&id=YjgwZDk2NjAtZWQ0YzFjOWQtNzBlMzE1MDEtZTY3YWNjNGU=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:23:08.320317Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-10-24T16:23:08.320425Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1761322988320388 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:08.320460Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-10-24T16:23:08.332177Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:08.332205Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:08.332941Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1013:2791], serverId# [4:1014:2792], sessionId# [0:0:0] 2025-10-24T16:23:08.333853Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2793], serverId# [4:1016:2794], sessionId# [0:0:0] |53.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::NormalLifecycle [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... received OnTabletStop ... received OnTabletStop ... received OnTabletStop ... waiting for client shutting down notification ... 
waiting for connect2 >> TPipeCacheTest::TestTabletNode [GOOD] >> TResourceBroker::TestQueueWithConfigure >> TTabletPipeTest::TestConsumerSidePipeReset >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> TabletState::SeqNoSubscribeOutOfOrder |53.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |53.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |53.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> Viewer::Cluster10000Tablets >> Viewer::SelectStringWithNoBase64Encoding >> TFileStoreWithReboots::CreateAlter [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> TTabletPipeTest::TestConnectReject >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: 2025-10-24T16:22:58.434146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:22:58.478532Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:22:58.478633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:22:58.478655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000975/r3tmp/tmpo1qi5h/pdisk_1.dat 2025-10-24T16:22:58.653587Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:58.654608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:22:58.654662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:22:58.654781Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322976647341 != 1761322976647344 2025-10-24T16:22:58.686953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:22:58.768583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:58.816531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:22:58.906577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:22:58.925126Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:22:58.925219Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:22:58.965909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:22:58.965936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:22:58.966073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:22:58.966083Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:22:58.966090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:22:58.966145Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:22:58.966169Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:22:58.966182Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:22:58.976570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:22:58.998008Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:22:58.998081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:22:58.998113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:22:58.998118Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:58.998122Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:22:58.998127Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:58.998261Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:22:58.998283Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:22:58.998298Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:58.998304Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:58.998312Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:22:58.998324Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:58.998334Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:22:58.998426Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:22:58.998476Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:22:58.998490Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:22:58.998809Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.009095Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:22:59.009132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:22:59.168721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:22:59.171158Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:22:59.171182Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.171260Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.171268Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:22:59.171280Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:22:59.171346Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:22:59.171375Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:22:59.180055Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:22:59.180094Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:22:59.181086Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:22:59.181499Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:22:59.181968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:22:59.181976Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.182014Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:22:59.182026Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.182276Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:22:59.182284Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:22:59.182288Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:22:59.182301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:22:59.182330Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:22:59.182341Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:22:59.183826Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:22:59.188503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:22:59.188630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:22:59.188641Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:22:59.197153Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186 ... G: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:09.883404Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:09.883415Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:09.883438Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:09.883452Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-10-24T16:23:09.883463Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:09.883939Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-10-24T16:23:09.883950Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:09.895471Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.895497Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:891:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.895532Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.895632Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:897:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.895687Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.896565Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:09.915669Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:10.087682Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:10.088203Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:23:10.110244Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:10.164998Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgfya7f0avjxv9wq9q3rf4, Database: , SessionId: ydb://session/3?node_id=4&id=NzYxZGFiNzktYzA5Mzg3NzEtODUzODFlNmEtMmM3Y2Q3ZjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:10.165655Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:985:2772], serverId# [4:986:2773], sessionId# [0:0:0] 2025-10-24T16:23:10.165787Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-10-24T16:23:10.165852Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1761322990165816 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:10.165888Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-10-24T16:23:10.180198Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-10-24T16:23:10.180223Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:10.196133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-10-24T16:23:10.196753Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:10.196806Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2025-10-24T16:23:10.196821Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2025-10-24T16:23:10.196827Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2025-10-24T16:23:10.207148Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:10.336882Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned 
transaction txId 281474976715662 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:10.336912Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:10.336956Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:10.336963Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:10.336971Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715662] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:10.337035Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715662 keys extracted: 0 2025-10-24T16:23:10.337061Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:10.337091Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:10.337109Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2025-10-24T16:23:10.337257Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1849: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 2500, txId# 281474976715662, at tablet# 72075186224037888 2025-10-24T16:23:10.337287Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 2500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-10-24T16:23:10.337366Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:10.337740Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-10-24T16:23:10.337750Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:10.338059Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:10.338072Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 } 2025-10-24T16:23:10.338089Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:10.338100Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-10-24T16:23:10.338109Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { 
Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 } 2025-10-24T16:23:10.338113Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:10.338500Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-10-24T16:23:10.338511Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:10.340148Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1028:2810], serverId# [4:1029:2811], sessionId# [0:0:0] 2025-10-24T16:23:10.351421Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1031:2813], serverId# [4:1032:2814], sessionId# [0:0:0] |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> TTabletPipeTest::TestConnectReject [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::DefaultConfig [GOOD] >> Viewer::LevenshteinDistance [GOOD] >> TFileStoreWithReboots::AlterAssignDrop [GOOD] >> Viewer::JsonStorageListingV2 >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> TabletState::SeqNoSubscriptionReplace >> TResourceBroker::TestOverusage >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateAlter [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:42.722645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:42.722664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:42.722670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:42.722677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:42.722682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:42.722687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:42.722695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:42.722711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:42.722822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:42.722872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:42.745786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:42.745812Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:42.745905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:42.748422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:42.748499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:42.748535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:42.750904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:42.751032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:42.751120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.751175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:42.752089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:42.752132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:42.752318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:42.752327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:42.752349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:42.752355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:42.752360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:42.752380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:42.753568Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:42.774901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:42.774964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.775016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:42.775025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:42.775061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:42.775076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:42.775741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-10-24T16:22:42.775789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:42.775840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.775873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:42.775880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:42.775885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:42.776416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.776430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:42.776436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:42.776797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.776808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:42.776814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:42.776821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:42.777453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:42.777820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:42.777857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:42.778067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:42.778094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: 
Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... meshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:23:11.570295Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:89: TAlterFileStore::TConfigureParts operationId# 1002:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:11.570545Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275054593 2025-10-24T16:23:11.570561Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72075186233409546 2025-10-24T16:23:11.570586Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-10-24T16:23:11.570603Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxId: 1002 Origin: 72075186233409546 Status: OK 2025-10-24T16:23:11.570608Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:43: TAlterFileStore::TConfigureParts operationId# 1002:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2025-10-24T16:23:11.570612Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1002:0 3 -> 128 2025-10-24T16:23:11.571092Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571112Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571117Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:197: TAlterFileStore::TPropose operationId# 1002:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571123Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1002 ready parts: 1/1 2025-10-24T16:23:11.571145Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:11.571366Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2025-10-24T16:23:11.571385Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000004 2025-10-24T16:23:11.571440Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 
5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571456Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 313532614767 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571461Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_fs.cpp:153: TAlterFileStore::TPropose operationId# 1002:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571484Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-10-24T16:23:11.571487Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:23:11.571492Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-10-24T16:23:11.571494Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:23:11.571503Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:11.571510Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-10-24T16:23:11.571514Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:23:11.571518Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1002:0 2025-10-24T16:23:11.571521Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1002:0 2025-10-24T16:23:11.571538Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:23:11.571542Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1002, publications: 1, subscribers: 0 2025-10-24T16:23:11.571546Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-10-24T16:23:11.571870Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571879Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:11.571906Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:11.571911Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [73:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2025-10-24T16:23:11.572009Z node 73 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:11.572021Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:11.572028Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:23:11.572033Z node 73 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:11.572037Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:11.572051Z node 73 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-10-24T16:23:11.576176Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-10-24T16:23:11.576230Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-10-24T16:23:11.576236Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-10-24T16:23:11.576294Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:23:11.576308Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-10-24T16:23:11.576313Z node 73 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [73:418:2397] TestWaitNotification: OK eventTxId 1002 2025-10-24T16:23:11.576372Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:11.576398Z node 73 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_2" took 33us result status StatusSuccess 2025-10-24T16:23:11.576464Z node 73 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/FS_2" PathDescription { Self { Name: "FS_2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeFileStore CreateFinished: true CreateTxId: 1001 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 FileStoreVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } FileStoreDescription { Name: "FS_2" PathId: 3 IndexTabletId: 72075186233409546 Config { Version: 2 FolderId: "bar" CloudId: "baz" BlockSize: 4096 } Version: 2 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-10-24T16:23:05.593290Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.593301Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.593305Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:05.606899Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:05.616702Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:05.624941Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.625114Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:05.630168Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:05.630216Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:05.630852Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-10-24T16:23:05.630860Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:05.631362Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:05.631370Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-10-24T16:23:05.631380Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-10-24T16:23:05.631384Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-10-24T16:23:05.631827Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.631831Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.631835Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:05.633263Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:05.633368Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:05.633426Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.638502Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-10-24T16:23:05.647878Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:05.647922Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-10-24T16:23:05.649419Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-10-24T16:23:05.649436Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-10-24T16:23:05.649468Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:05.649474Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:05.649486Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:05.649726Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-10-24T16:23:05.649733Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:05.649736Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-10-24T16:23:05.649738Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:05.649755Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-10-24T16:23:05.649779Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-10-24T16:23:05.649782Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-10-24T16:23:05.649784Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:05.649792Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-10-24T16:23:05.649797Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-10-24T16:23:05.649799Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-10-24T16:23:05.649802Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:05.649815Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-10-24T16:23:05.650462Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.650466Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.650470Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:05.663915Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:05.674132Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:05.674462Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:05.674583Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". 
Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-10-24T16:23:05.674763Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:05.674787Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-10-24T16:23:05.678618Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-10-24T16:23:05.678649Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-10-24T16:23:05.679891Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:05.679913Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:05.679952Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-10-24T16:23:05.679963Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:05.679966Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:05.679975Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-10-24T16:23:05.679980Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:05.679983Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-10-24T16:23:05.679992Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-10-24T16:23:05.679997Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-10-24T16:23:05.680000Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:11.623689Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). 
Partition stream id: 1 2025-10-24T16:23:11.655269Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-10-24T16:23:11.655280Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-10-24T16:23:11.655284Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:11.671455Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:11.671658Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:11.757974Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-10-24T16:23:11.759154Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-10-24T16:23:11.828353Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-10-24T16:23:11.828530Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:11.829089Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:11.829902Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:11.830013Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-10-24T16:23:11.831482Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-10-24T16:23:11.831595Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-10-24T16:23:11.831688Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-10-24T16:23:11.831784Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-10-24T16:23:11.834739Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-10-24T16:23:11.834843Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-10-24T16:23:11.834864Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-10-24T16:23:11.834934Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-10-24T16:23:11.836418Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-10-24T16:23:11.836852Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.836856Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.836858Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:11.836919Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:11.836998Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:11.837025Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.837065Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-10-24T16:23:11.837123Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-10-24T16:23:11.837270Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.837273Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.837275Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:11.837308Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:11.837363Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:11.837375Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.837468Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:11.837489Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:11.837502Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:11.837510Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-10-24T16:23:11.837532Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateWithIntermediateDirsForceDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:50.794551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:50.794574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:50.794580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:50.794589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: 
OperationsProcessing config: using default configuration 2025-10-24T16:22:50.794598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:50.794602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:50.794612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:50.794707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:50.795085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:50.795263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:50.841104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:50.841155Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:50.841278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:50.845466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:50.845565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:50.845750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:50.849862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:50.850379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:50.850533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:50.850618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:50.851769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:50.851825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:50.852125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:50.852139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:50.852170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-10-24T16:22:50.852179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:50.852186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:50.852222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:50.853888Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:50.890187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:50.890304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:50.890387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:50.890397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:50.890443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:50.890459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:50.892321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:50.892384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:50.892459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:50.892480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:50.892487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:50.892493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:50.892985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:50.892996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:50.893003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:50.893355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:50.893364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:50.893369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:50.893374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:50.893864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:50.894114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:50.894157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:50.894358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:50.894378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
PublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:11.710217Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-10-24T16:23:11.710221Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2025-10-24T16:23:11.710225Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1005, path id: 6 2025-10-24T16:23:11.710230Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [49:213:2214], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-10-24T16:23:11.710312Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710322Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710325Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:23:11.710330Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-10-24T16:23:11.710333Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-10-24T16:23:11.710403Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710410Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710413Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:23:11.710416Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-10-24T16:23:11.710419Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:23:11.710503Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710512Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710516Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:23:11.710519Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-10-24T16:23:11.710523Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:23:11.710749Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710761Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.710765Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:23:11.710770Z node 49 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-10-24T16:23:11.710773Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:23:11.710782Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 1 2025-10-24T16:23:11.710786Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [49:362:2352] 2025-10-24T16:23:11.710892Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:23:11.710904Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 Leader for TabletID 72057594037968897 is [49:225:2223] sender: [49:400:2058] recipient: [49:15:2062] 2025-10-24T16:23:11.711375Z node 49 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 
2025-10-24T16:23:11.711426Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:11.711475Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-10-24T16:23:11.711529Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:11.711534Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-10-24T16:23:11.711544Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-10-24T16:23:11.711549Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-10-24T16:23:11.711554Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:23:11.711560Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-10-24T16:23:11.711565Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:11.711668Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.711703Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.711866Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.711927Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:23:11.711937Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-10-24T16:23:11.711942Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [49:363:2353] 2025-10-24T16:23:11.712266Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:11.712306Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 TestWaitNotification: OK eventTxId 1005 2025-10-24T16:23:11.712385Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/x" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:11.712412Z node 49 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/x" took 35us result status StatusPathDoesNotExist 2025-10-24T16:23:11.712441Z node 49 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/x\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/x" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-10-24T16:23:11.661599Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:11.661699Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [1:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:11.661708Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:11.661716Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:11.661721Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:11.661735Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 1000.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:11.661744Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:11.661748Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:11.661753Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:11.661759Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:105:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:11.661763Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:11.661767Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough 
resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:23:11.661771Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:11.661972Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_compaction1" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-10-24T16:23:11.662023Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:11.662028Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:11.662033Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:11.662040Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_default from 0.000000 to 176132299166.100006 2025-10-24T16:23:11.662045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 500.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:11.662050Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-2 (2 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:11.662054Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:11.662059Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 40.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:11.662067Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:11.662070Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:11.662074Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 200.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:11.662082Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2025-10-24T16:23:11.662124Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } 
Tasks { Name: "compaction1" QueueName: "queue_default1" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-10-24T16:23:11.662134Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-10-24T16:23:11.662162Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown1" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-10-24T16:23:11.662170Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2025-10-24T16:23:11.662197Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default1" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-10-24T16:23:11.662204Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" 2025-10-24T16:23:11.662230Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 5 Limit { Resource: 400 } } Queues { Name: "queue_compaction0" Weight: 50 Limit { Resource: 400 } } Queues { Name: "queue_compaction1" Weight: 20 Limit { Resource: 400 } } Queues { Name: "queue_scan" Weight: 20 Limit { Resource: 400 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 5000000 } Tasks { Name: "compaction0" QueueName: "queue_compaction0" DefaultDuration: 10000000 } Tasks { Name: "compaction1" QueueName: "queue_default" DefaultDuration: 20000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 20000000 } ResourceLimit { Resource: 1000 Resource: 1000 } 2025-10-24T16:23:11.662260Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:11.662266Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_compaction0 from 0.000000 to 7045291966.644000 2025-10-24T16:23:11.662271Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 40.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:11.662275Z node 1 
:RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:11.662280Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_default from 0.000000 to 70452919666.440002 2025-10-24T16:23:11.662284Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 800.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:11.662287Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:11.662292Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 800.000000 to 70452920166.440002 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:11.662297Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2025-10-24T16:23:12.148608Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:12.149018Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:105:2138]) priority=5 resources={500, 0} 2025-10-24T16:23:12.149026Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.149033Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 0} for task task-1 (1 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:12.149038Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.149050Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:12.149058Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={500, 0} 2025-10-24T16:23:12.149061Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:12.149066Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:12.149079Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 0}) 2025-10-24T16:23:12.149084Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 2025-10-24T16:23:12.149089Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 0} for task task-2 (2 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:12.149092Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:12.149096Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1000.000000 (insert task task-2 (2 by [2:105:2138])) 2025-10-24T16:23:12.149103Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-2 (2 by [2:105:2138]) (release resources {500, 0}) 2025-10-24T16:23:12.149108Z node 2 :RESOURCE_BROKER DEBUG: 
resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1000.000000 to 500.000000 (remove task task-2 (2 by [2:105:2138])) 2025-10-24T16:23:12.149112Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 500.000000 2025-10-24T16:23:12.149118Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [2:105:2138]) priority=5 resources={250, 0} 2025-10-24T16:23:12.149121Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:12.149125Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 0} for task task-3 (3 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:12.149128Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:12.149132Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 500.000000 to 987.500000 (insert task task-3 (3 by [2:105:2138])) 2025-10-24T16:23:12.149138Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new scan task task-4 (4 by [2:105:2138]) priority=5 resources={0, 800} 2025-10-24T16:23:12.149142Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_scan 2025-10-24T16:23:12.149147Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:663: Updated real resource usage for queue queue_compaction1 from 500.000000 to 750.000000 (in-fly consumption {250, 0}) 2025-10-24T16:23:12.149152Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [2:105:2138]) 2025-10-24T16:23:12.149157Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [2:105:2138]) priority=5 resources={250, 0} 2025-10-24T16:23:12.149160Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.149165Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:663: Updated real resource usage for queue queue_compaction1 from 750.000000 to 1000.000000 (in-fly consumption {250, 0}) 2025-10-24T16:23:12.149168Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [2:105:2138]) 2025-10-24T16:23:12.149171Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 0} for task task-5 (5 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:12.149175Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.149179Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 1000.000000 to 1500.000000 (insert task task-5 (5 by [2:105:2138])) >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::DefaultConfig [GOOD] Test command err: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } 
Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 10737418240 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: 
"queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 17179869184 } Total queues cpu: 90 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::AlterAssignDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:54.475204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:54.475222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.475226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:54.475231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:54.475236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:54.475238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:54.475244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.475450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:54.476366Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:54.477027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:54.578444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:54.578467Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:54.578554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:54.584889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:54.584965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:54.585347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:54.591545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:54.593182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:54.593405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.593517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:54.595225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.595266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:54.595464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:54.595472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.595492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:54.595498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:54.595503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:54.595523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:54.597234Z node 1 
:HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:54.674311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:54.674365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.674411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:54.674416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:54.674451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:54.674463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:54.674957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.674997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:54.675034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.675049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:54.675053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:54.675058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:54.675334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.675343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:54.675348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:54.675586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.675593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.675597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.675603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:54.676176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:54.676648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:54.677167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:54.677338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.677359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
3:12.022932Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-10-24T16:23:12.022938Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:12.022944Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:12.022949Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-10-24T16:23:12.022954Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-10-24T16:23:12.022958Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1005:0 2025-10-24T16:23:12.022961Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1005:0 2025-10-24T16:23:12.022972Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:12.022977Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2025-10-24T16:23:12.022980Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-10-24T16:23:12.022983Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-10-24T16:23:12.023196Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:12.023206Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:12.023423Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:23:12.023435Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:12.023479Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:12.023484Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:12.023504Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:12.023520Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:12.023524Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [42:211:2212], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-10-24T16:23:12.023528Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [42:211:2212], at schemeshard: 72057594046678944, txId: 1005, path id: 3 FAKE_COORDINATOR: Erasing txId 1005 2025-10-24T16:23:12.023599Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:12.023607Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:12.023611Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:23:12.023615Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:23:12.023619Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:12.023662Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:12.023666Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:12.023673Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:23:12.023702Z node 42 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2025-10-24T16:23:12.023720Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:12.023726Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2025-10-24T16:23:12.023730Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-10-24T16:23:12.023733Z node 42 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 
2025-10-24T16:23:12.023736Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:12.023743Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-10-24T16:23:12.023763Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:12.024095Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:23:12.024151Z node 42 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:12.024357Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-10-24T16:23:12.024371Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-10-24T16:23:12.024421Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-10-24T16:23:12.024426Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-10-24T16:23:12.024469Z node 42 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-10-24T16:23:12.024481Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-10-24T16:23:12.024485Z node 42 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [42:454:2433] TestWaitNotification: OK eventTxId 1005 2025-10-24T16:23:12.024538Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/FS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:12.024557Z node 42 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/FS" took 26us result status StatusPathDoesNotExist 2025-10-24T16:23:12.024583Z node 42 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/FS\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/FS" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted wait until 72075186233409550 is deleted 2025-10-24T16:23:12.024630Z node 42 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-10-24T16:23:12.024639Z node 42 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-10-24T16:23:12.024646Z node 42 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-10-24T16:23:12.024654Z node 42 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 2025-10-24T16:23:12.024661Z node 42 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409550 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 Deleted tabletId 72075186233409550 >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonStorageListingV1 >> TabletState::SeqNoSubscriptionReplace [GOOD] >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TTabletLabeledCountersAggregator::Version3Aggregation >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TSchemeShardSysViewTest::ReadOnlyMode >> TResourceBroker::TestNotifyActorDied [GOOD] >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] |53.7%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TTabletPipeTest::TestInterconnectSession |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TResourceBroker::TestErrors >> TFileStoreWithReboots::CreateDrop [GOOD] >> BootstrapperTest::KeepExistingTablet |53.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:168:2058] recipient: [3:166:2141] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:168:2058] recipient: [3:166:2141] Leader for TabletID 9437184 is [3:174:2145] sender: [3:175:2058] recipient: [3:166:2141] Leader for TabletID 9437185 is [0:0:0] sender: [4:178:2049] recipient: [4:169:2098] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:178:2049] recipient: [4:169:2098] Leader for TabletID 9437185 is [4:190:2101] sender: [4:191:2049] recipient: [4:169:2098] Leader for TabletID 9437184 is [3:174:2145] sender: [3:218:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:190:2101] sender: [3:220:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:190:2101] sender: [4:222:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:190:2101] sender: [3:225:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:190:2101] sender: [4:223:2049] recipient: [4:163:2097] Leader for TabletID 9437185 is [4:190:2101] sender: [4:228:2049] recipient: [4:227:2114] Leader for TabletID 9437185 is [4:229:2115] sender: [4:230:2049] recipient: [4:227:2114] Leader for TabletID 9437185 is [4:229:2115] sender: [3:260:2058] recipient: [3:15:2062] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] Test command err: 2025-10-24T16:23:12.867168Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:12.867281Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [1:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:12.867291Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.867298Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:12.867304Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.867317Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 
from 0.000000 to 100.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:12.867327Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=5 resources={410, 410} 2025-10-24T16:23:12.867331Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.867336Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_compaction0 due to exceeded limits 2025-10-24T16:23:12.867343Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-3 (3 by [1:105:2138]) priority=5 resources={550, 550} 2025-10-24T16:23:12.867347Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:12.867352Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:23:12.867356Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:12.867370Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {50, 50}) 2025-10-24T16:23:12.867377Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2025-10-24T16:23:12.867383Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {550, 550} for task task-3 (3 by [1:105:2138]) from queue queue_compaction1 2025-10-24T16:23:12.867387Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_compaction1 2025-10-24T16:23:12.867392Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 0.000000 to 1100.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:12.867398Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:12.867404Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-3 (3 by [1:105:2138]) (release resources {550, 550}) 2025-10-24T16:23:12.867410Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1100.000000 to 550.000000 (remove task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:12.867415Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 0.000000 to 550.000000 2025-10-24T16:23:12.867419Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {410, 410} for task task-2 (2 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:12.867423Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:12.867428Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 920.000000 (insert task task-2 (2 by [1:105:2138])) 2025-10-24T16:23:13.139113Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:13.139177Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-1 (1 by [2:104:2138]) priority=5 resources={500, 
500} 2025-10-24T16:23:13.139184Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:23:13.139189Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:104:2138]) from queue queue_compaction0 2025-10-24T16:23:13.139193Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:104:2138]) to queue queue_compaction0 2025-10-24T16:23:13.139206Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:104:2138])) 2025-10-24T16:23:13.139213Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:104:2138]) priority=5 resources={200, 200} 2025-10-24T16:23:13.139215Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:104:2138]) to queue queue_compaction1 2025-10-24T16:23:13.139219Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:104:2138]) 2025-10-24T16:23:13.139222Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2139]) priority=5 resources={200, 200} 2025-10-24T16:23:13.139225Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2139]) to queue queue_compaction0 2025-10-24T16:23:13.139227Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:104:2138]) 2025-10-24T16:23:13.139229Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:13.139233Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:105:2139]) priority=5 resources={200, 200} 2025-10-24T16:23:13.139235Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2139]) to queue queue_compaction1 2025-10-24T16:23:13.139238Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:104:2138]) 2025-10-24T16:23:13.139240Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:13.139247Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:104:2138]) (release resources {500, 500}) 2025-10-24T16:23:13.139252Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction0 from 1000.000000 to 100.000000 (remove task task-1 (1 by [2:104:2138])) 2025-10-24T16:23:13.139255Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 100.000000 2025-10-24T16:23:13.139258Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task task-2 (2 by [2:104:2138]) 2025-10-24T16:23:13.139261Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-4 (4 by [2:105:2139]) from queue queue_compaction1 2025-10-24T16:23:13.139264Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [2:105:2139]) to queue queue_compaction1 2025-10-24T16:23:13.139267Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource 
usage for queue queue_compaction1 from 0.000000 to 400.000000 (insert task task-4 (4 by [2:105:2139])) 2025-10-24T16:23:13.139270Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {200, 200} for task task-3 (3 by [2:105:2139]) from queue queue_compaction0 2025-10-24T16:23:13.139273Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2139]) to queue queue_compaction0 2025-10-24T16:23:13.139276Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 100.000000 to 500.000000 (insert task task-3 (3 by [2:105:2139])) |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |53.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |53.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> BootstrapperTest::MultipleBootstrappers [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteScheme |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TResourceBroker::TestExecutionStat [GOOD] >> Compression::WriteRAW [GOOD] >> RemoteTopicReader::ReadTopic >> Compression::WriteGZIP >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] Test command err: 2025-10-24T16:23:13.268322Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:315: [9437185] Detach 2025-10-24T16:23:13.272389Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2025-10-24T16:23:13.274119Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2025-10-24T16:23:13.275004Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:134:2158] 2025-10-24T16:23:13.275018Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:134:2158] 2025-10-24T16:23:13.275075Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:134:2158] 2025-10-24T16:23:13.275083Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:134:2158] 2025-10-24T16:23:13.275102Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:134:2158] 2025-10-24T16:23:13.275107Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:134:2158] 2025-10-24T16:23:13.275120Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:134:2158] 2025-10-24T16:23:13.275146Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:134:2158] Type# 269877249 Reason# ActorUnknown 2025-10-24T16:23:13.275175Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:137:2160] 2025-10-24T16:23:13.275179Z node 1 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:137:2160] 2025-10-24T16:23:13.275187Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:137:2160] 2025-10-24T16:23:13.275192Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:137:2160] 2025-10-24T16:23:13.275197Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:137:2160] 2025-10-24T16:23:13.275201Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:137:2160] 2025-10-24T16:23:13.275208Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:137:2160] 2025-10-24T16:23:13.275220Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:137:2160] Type# 269877249 Reason# ActorUnknown 2025-10-24T16:23:13.275233Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:139:2162] 2025-10-24T16:23:13.275237Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:139:2162] 2025-10-24T16:23:13.275244Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:139:2162] 2025-10-24T16:23:13.275248Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:139:2162] 2025-10-24T16:23:13.275253Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:139:2162] 2025-10-24T16:23:13.275256Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:139:2162] 2025-10-24T16:23:13.275262Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:139:2162] 2025-10-24T16:23:13.275275Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:139:2162] Type# 269877249 Reason# ActorUnknown |53.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |53.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> BootstrapperTest::KeepExistingTablet [GOOD] >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::QueryExecuteScript >> BootstrapperTest::DuplicateNodes |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> TSchemeShardSysViewTest::AsyncCreateSameSysView >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } 2025-10-24T16:23:14.062045Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437185] NodeDisconnected NodeId# 2 >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews >> RemoteTopicReader::PassAwayOnCreatingReadSession >> TSchemeShardSysViewTest::DropSysView |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteEmptyColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:13.451281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:13.451316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:13.451322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:13.451330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:13.451337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-10-24T16:23:13.451342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:13.451355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:13.451370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:13.451509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:13.451580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:13.468551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:13.468580Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:13.473019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:13.473064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:13.473124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:13.475064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:13.475281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:13.475414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:13.475512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:13.477362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:13.477419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:13.477716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:13.477732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:13.477761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:13.477771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:13.477777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:13.477810Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:13.512964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:13.513379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... el: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:13.846708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-10-24T16:23:13.846749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2025-10-24T16:23:13.846913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:13.846932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:13.846943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2025-10-24T16:23:13.846975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-10-24T16:23:13.847008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-10-24T16:23:13.847024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:23:13.847430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:13.847439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:13.847533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-10-24T16:23:13.847556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:13.847562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:826:2783], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:23:13.847568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:826:2783], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-10-24T16:23:13.847662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress 
Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:13.847671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:23:13.847686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:13.847692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:13.847697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:13.847701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:13.847706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-10-24T16:23:13.847712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:13.847717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:23:13.847722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:23:13.847736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-10-24T16:23:13.847746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-10-24T16:23:13.847751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2025-10-24T16:23:13.847755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-10-24T16:23:13.847872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:13.847886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:13.847892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:13.847897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-10-24T16:23:13.847902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-10-24T16:23:13.847974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:13.847984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:13.847988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:13.847992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-10-24T16:23:13.847996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-10-24T16:23:13.848004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:23:13.856646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:13.856730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:23:13.856802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:13.856811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:13.856911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:13.856953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:13.856959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:857:2812] TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:13.857048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:13.857106Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 78us result status StatusSuccess 2025-10-24T16:23:13.857212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... sleeping for 2 seconds 2025-10-24T16:23:09.811494Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:09.811513Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:09.811522Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:09.811666Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-10-24T16:23:09.811673Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-10-24T16:23:09.811709Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-10-24T16:23:09.811714Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-10-24T16:23:09.811720Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-10-24T16:23:09.811724Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-10-24T16:23:09.812007Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-10-24T16:23:09.812031Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-10-24T16:23:09.812037Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-10-24T16:23:09.812045Z 
node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-10-24T16:23:09.812063Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-10-24T16:23:09.812072Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-10-24T16:23:09.812077Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-10-24T16:23:09.812108Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-10-24T16:23:09.812113Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-10-24T16:23:10.060401Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:10.060597Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] 2025-10-24T16:23:10.060708Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-10-24T16:23:10.060714Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-10-24T16:23:10.127329Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:10.127698Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] 2025-10-24T16:23:10.127776Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-10-24T16:23:10.127780Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2025-10-24T16:23:11.153946Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2025-10-24T16:23:11.153976Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-10-24T16:23:11.154090Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-10-24T16:23:11.154098Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:11.154389Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-10-24T16:23:11.154394Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:11.155990Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] 2025-10-24T16:23:11.156074Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] 2025-10-24T16:23:11.156296Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-10-24T16:23:11.156300Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-10-24T16:23:11.156313Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-10-24T16:23:11.156316Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-10-24T16:23:12.019383Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2025-10-24T16:23:12.019411Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-10-24T16:23:12.019524Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-10-24T16:23:12.019536Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:12.019572Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-10-24T16:23:12.019578Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:12.020248Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] 2025-10-24T16:23:12.020355Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-10-24T16:23:12.020652Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-10-24T16:23:12.020663Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 2025-10-24T16:23:12.020674Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-10-24T16:23:12.020679Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2025-10-24T16:23:12.020892Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-10-24T16:23:12.020914Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-10-24T16:23:12.020920Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-10-24T16:23:12.020932Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-10-24T16:23:12.020975Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-10-24T16:23:12.020978Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-10-24T16:23:12.854449Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-10-24T16:23:12.854471Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:12.854489Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-10-24T16:23:12.854497Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:12.854659Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] 2025-10-24T16:23:12.854696Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:288:2099] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-10-24T16:23:12.854823Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-10-24T16:23:12.854831Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8470239763125230813 2025-10-24T16:23:12.854900Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-10-24T16:23:12.854904Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2025-10-24T16:23:12.854941Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-10-24T16:23:12.854947Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2025-10-24T16:23:12.854983Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-10-24T16:23:12.854987Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-10-24T16:23:12.855026Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-10-24T16:23:12.855031Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-10-24T16:23:12.855075Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: OWNER 2025-10-24T16:23:12.855081Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 5 (owner) 2025-10-24T16:23:12.855373Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:733: tablet: 9437184, type: Dummy, tablet dead 2025-10-24T16:23:12.855384Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:12.856480Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:443:2099] 2025-10-24T16:23:12.860104Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-10-24T16:23:12.860120Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... 
waiting for pipe to connect >> Viewer::JsonAutocompleteSchemePOST [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CreateDrop [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:39.057183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:39.057203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:39.057209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:39.057217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:39.057223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:39.057227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:39.057236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:39.057470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:39.058973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:39.065189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:39.525270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:39.525308Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:39.525408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:39.560405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:39.560528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:39.561342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:39.612137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:39.614421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:39.614952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:39.615033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:39.636025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:39.636101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:39.636394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:39.636403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:39.636425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:39.636433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:39.636439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:39.636470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:39.648397Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:39.880288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:39.880364Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:39.880428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:39.880437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:39.880473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:39.880486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:39.888270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:39.888325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:39.888383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:39.888402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:39.888408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:39.888413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:39.892110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:39.892123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:39.892130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:39.900134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:39.900147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:39.900154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:39.900162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-10-24T16:22:39.902916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:39.908037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:39.908085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:39.908315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:39.908348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... ne, operation id: 1002:0 2025-10-24T16:23:13.394133Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1002:0 2025-10-24T16:23:13.394154Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:13.394160Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1002, publications: 3, subscribers: 0 2025-10-24T16:23:13.394166Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:23:13.394170Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-10-24T16:23:13.394174Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-10-24T16:23:13.394534Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:13.394547Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:13.394763Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:23:13.394777Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:13.394801Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-10-24T16:23:13.394806Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:13.394844Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:13.394853Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:13.394877Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:13.394882Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [85:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-10-24T16:23:13.394887Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [85:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2025-10-24T16:23:13.394891Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [85:213:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 3 FAKE_COORDINATOR: Erasing txId 1002 2025-10-24T16:23:13.395043Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.395055Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.395060Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:23:13.395065Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:23:13.395073Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:13.395121Z node 85 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2025-10-24T16:23:13.395146Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:13.395151Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, 
LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:13.395160Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:13.395195Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.395206Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.395210Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:23:13.395214Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:23:13.395218Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:13.395245Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:13.395311Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.395322Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.395326Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:23:13.395330Z node 85 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-10-24T16:23:13.395334Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:13.395342Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-10-24T16:23:13.395636Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.396071Z node 85 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:13.396091Z node 85 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-10-24T16:23:13.396105Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:13.396116Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-10-24T16:23:13.396185Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-10-24T16:23:13.396192Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-10-24T16:23:13.396248Z node 85 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:23:13.396265Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-10-24T16:23:13.396270Z node 85 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [85:421:2400] TestWaitNotification: OK eventTxId 1002 2025-10-24T16:23:13.396335Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/FS_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:13.396361Z node 85 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/FS_3" took 37us result status StatusPathDoesNotExist 2025-10-24T16:23:13.396397Z node 85 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/FS_3\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/DirA/FS_3" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView >> TSchemeShardSysViewTest::DropSysView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] Test command err: 2025-10-24T16:23:13.938663Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 
2025-10-24T16:23:13.939091Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [1:105:2138]) priority=5 resources={400, 400} 2025-10-24T16:23:13.939099Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:13.939108Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {400, 400} for task task-1 (1 by [1:105:2138]) from queue queue_default 2025-10-24T16:23:13.939113Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [1:105:2138]) to queue queue_default 2025-10-24T16:23:13.939124Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 800.000000 (insert task task-1 (1 by [1:105:2138])) 2025-10-24T16:23:13.939141Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:13.939145Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939150Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:13.939156Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [1:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:13.939160Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939164Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:13.939170Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-4 (4 by [1:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:13.939173Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939177Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:13.939182Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-2 (2 by [1:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:13.939186Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:408: SubmitTask failed for task 2 to [1:105:2138]: task with the same ID has been already submitted 2025-10-24T16:23:13.939199Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:494: RemoveQueuedTask failed for task 1 to [1:105:2138]: cannot remove in-fly task 2025-10-24T16:23:13.939207Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:486: RemoveQueuedTask failed for task 5 to [1:105:2138]: cannot remove unknown task 2025-10-24T16:23:13.939212Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:486: RemoveQueuedTask failed for task 2 to [1:106:2139]: cannot remove unknown task 2025-10-24T16:23:13.939218Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task task-2 (2 by [1:105:2138]) 2025-10-24T16:23:13.939223Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-3 (3 by [1:105:2138]) 2025-10-24T16:23:13.939230Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:433: UpdateTask failed for task 2 to [1:105:2138]: 
cannot update unknown task 2025-10-24T16:23:13.939236Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:433: UpdateTask failed for task 4 to [1:106:2139]: cannot update unknown task 2025-10-24T16:23:13.939241Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-4 (4 by [1:105:2138]) (priority=4 type=compaction0 resources={250, 250} resubmit=0) 2025-10-24T16:23:13.939245Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939250Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [1:105:2138]) 2025-10-24T16:23:13.939254Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task task-3 (3 by [1:105:2138]) (priority=6 type=compaction0 resources={250, 250} resubmit=0) 2025-10-24T16:23:13.939258Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939261Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-4 (4 by [1:105:2138]) 2025-10-24T16:23:13.939267Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:515: FinishTask failed for task 5 to [1:105:2138]: cannot finish unknown task 2025-10-24T16:23:13.939273Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:515: FinishTask failed for task 2 to [1:106:2139]: cannot finish unknown task 2025-10-24T16:23:13.939278Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:523: FinishTask failed for task 3 to [1:105:2138]: cannot finish queued task 2025-10-24T16:23:13.939285Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [1:105:2138]) (release resources {400, 400}) 2025-10-24T16:23:13.939291Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_default from 0.000000 to 1600.000000 2025-10-24T16:23:13.939296Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-4 (4 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:13.939300Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939304Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 0.000000 to 500.000000 (insert task task-4 (4 by [1:105:2138])) 2025-10-24T16:23:13.939318Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:630: Skip queue queue_compaction0 due to exceeded limits 2025-10-24T16:23:13.939324Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-4 (4 by [1:105:2138]) (release resources {250, 250}) 2025-10-24T16:23:13.939328Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction0 from 0.000000 to 500.000000 2025-10-24T16:23:13.939331Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {250, 250} for task task-3 (3 by [1:105:2138]) from queue queue_compaction0 2025-10-24T16:23:13.939335Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [1:105:2138]) to queue queue_compaction0 2025-10-24T16:23:13.939339Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 500.000000 to 1000.000000 (insert task task-3 (3 by [1:105:2138])) 2025-10-24T16:23:14.499260Z node 2 
:RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-10-24T16:23:14.499645Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [2:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:14.499655Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_default 2025-10-24T16:23:14.499665Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:105:2138]) from queue queue_default 2025-10-24T16:23:14.499670Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_default 2025-10-24T16:23:14.499681Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 1000.000000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.499690Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.499694Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.499699Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.499704Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.499708Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.499711Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.499714Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.499720Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.499723Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.499726Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.499729Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.499735Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.499738Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.499741Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.499744Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.499749Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.499753Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning 
waiting task task-6 (6 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.499756Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.499759Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.499764Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.499769Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.499773Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.499776Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.499790Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 500}) 2025-10-24T16:23:14.499796Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated plann ... urce_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500503Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1150.000000 to 1207.500000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500508Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {50, 50}) 2025-10-24T16:23:14.500512Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1207.500000 to 1200.000000 (remove task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500516Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 1150.000000 to 1200.000000 2025-10-24T16:23:14.500522Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-1 (1 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500525Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500528Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:14.500531Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500535Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1200.000000 to 1255.000000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500541Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {50, 50}) 2025-10-24T16:23:14.500546Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1255.000000 to 1250.000000 (remove task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500552Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 1200.000000 to 1250.000000 2025-10-24T16:23:14.500557Z node 2 
:RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-1 (1 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500560Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500564Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-1 (1 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:14.500567Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500571Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1250.000000 to 1302.500000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500576Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {50, 50}) 2025-10-24T16:23:14.500580Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_compaction1 from 1302.500000 to 1300.000000 (remove task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500583Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_compaction1 from 1250.000000 to 1300.000000 2025-10-24T16:23:14.500589Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new unknown task task-1 (1 by [2:105:2138]) priority=5 resources={500, 500} 2025-10-24T16:23:14.500592Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-1 (1 by [2:105:2138]) to queue queue_default 2025-10-24T16:23:14.500595Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {500, 500} for task task-1 (1 by [2:105:2138]) from queue queue_default 2025-10-24T16:23:14.500599Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-1 (1 by [2:105:2138]) to queue queue_default 2025-10-24T16:23:14.500603Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_default from 0.000000 to 950.000000 (insert task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500608Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-2 (2 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500611Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500614Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.500619Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-3 (3 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500622Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.500626Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:706: Updated real resource usage for queue queue_compaction0 from 300.000000 to 1300.000000 2025-10-24T16:23:14.500629Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.500635Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.500640Z 
node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-4 (4 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500643Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500646Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.500649Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.500654Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-5 (5 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500657Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.500661Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.500663Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.500668Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction1 task task-6 (6 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500671Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-6 (6 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500675Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.500678Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.500683Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new compaction0 task task-7 (7 by [2:105:2138]) priority=5 resources={50, 50} 2025-10-24T16:23:14.500686Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.500689Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task task-2 (2 by [2:105:2138]) 2025-10-24T16:23:14.500692Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:608: Skip queue queue_compaction0 blocked by an earlier queue 2025-10-24T16:23:14.500697Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task task-1 (1 by [2:105:2138]) (release resources {500, 500}) 2025-10-24T16:23:14.500701Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_default from 950.000000 to 0.000000 (remove task task-1 (1 by [2:105:2138])) 2025-10-24T16:23:14.500704Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-2 (2 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:14.500708Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-2 (2 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500712Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1300.000000 to 1350.000000 (insert task task-2 (2 by [2:105:2138])) 2025-10-24T16:23:14.500716Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-3 (3 by [2:105:2138]) from queue queue_compaction0 
2025-10-24T16:23:14.500720Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-3 (3 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.500724Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 300.000000 to 1400.000000 (insert task task-3 (3 by [2:105:2138])) 2025-10-24T16:23:14.500729Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-4 (4 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:14.500733Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-4 (4 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500737Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1350.000000 to 1400.000000 (insert task task-4 (4 by [2:105:2138])) 2025-10-24T16:23:14.500740Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-6 (6 by [2:105:2138]) from queue queue_compaction1 2025-10-24T16:23:14.500743Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-6 (6 by [2:105:2138]) to queue queue_compaction1 2025-10-24T16:23:14.500747Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction1 from 1400.000000 to 1450.000000 (insert task task-6 (6 by [2:105:2138])) 2025-10-24T16:23:14.500751Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-5 (5 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:14.500754Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-5 (5 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.500758Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 1400.000000 to 1500.000000 (insert task task-5 (5 by [2:105:2138])) 2025-10-24T16:23:14.500761Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {50, 50} for task task-7 (7 by [2:105:2138]) from queue queue_compaction0 2025-10-24T16:23:14.500765Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task task-7 (7 by [2:105:2138]) to queue queue_compaction0 2025-10-24T16:23:14.500769Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_compaction0 from 1500.000000 to 1600.000000 (insert task task-7 (7 by [2:105:2138])) >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> BootstrapperTest::DuplicateNodes [GOOD] >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-10-24T16:23:09.988117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:10.024962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:10.025052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:10.025090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:10.095786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:10.095832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:10.099925Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:10.100458Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322988846764 != 1761322988846768 2025-10-24T16:23:10.132470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25784, node 1 TClient is connected to server localhost:29082 2025-10-24T16:23:10.514823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:10.514843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:10.514848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:10.515114Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:11.744461Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:11.745943Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:11.746952Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:11.747012Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:11.747020Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:11.806607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:11.806646Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:11.809844Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:11.810272Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1761322991009324 != 1761322991009328 2025-10-24T16:23:11.846219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14564, node 2 TClient is connected to server localhost:61241 2025-10-24T16:23:12.056052Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:12.056072Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:12.056077Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:12.056188Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:13.185959Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:13.188679Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:13.188751Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:13.188763Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:13.252705Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:13.252746Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:13.256185Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:13.256817Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:34:2081] 1761322992572138 != 1761322992572141 2025-10-24T16:23:13.292416Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18138, node 3 TClient is connected to server localhost:64672 2025-10-24T16:23:13.380215Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:13.380235Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:13.380240Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:13.380487Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:14.551089Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:14.553971Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:14.553994Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:14.554044Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:14.620639Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:14.620683Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:14.628939Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:14.629333Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:34:2081] 1761322993896075 != 1761322993896079 2025-10-24T16:23:14.664432Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16402, node 4 TClient is connected to server localhost:15342 2025-10-24T16:23:14.778490Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:14.778510Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:14.778515Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:14.778618Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:16.246573Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:16.248175Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:16.249247Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:16.249329Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:16.249339Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:16.300155Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:16.300196Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:16.303756Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:16.304266Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:34:2081] 1761322995373106 != 1761322995373110 2025-10-24T16:23:16.341177Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24207, node 5 TClient is connected to server localhost:6390 2025-10-24T16:23:16.438563Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:16.438582Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:16.438587Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:16.438727Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2025-10-24T16:23:16.348305Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:16.348338Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:16.348444Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-10-24T16:23:16.348451Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-10-24T16:23:16.348469Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-10-24T16:23:16.348473Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-10-24T16:23:16.350326Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-10-24T16:23:16.351629Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-10-24T16:23:16.351735Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-10-24T16:23:16.351743Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.139961s 2025-10-24T16:23:16.907253Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-10-24T16:23:16.907412Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:223:2098] 2025-10-24T16:23:16.907500Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-10-24T16:23:16.907506Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:16.766260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:16.766583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.766590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:16.766598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:16.766604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-10-24T16:23:16.766608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:16.766885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.766900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:16.767511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:16.768361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:16.947096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:16.947124Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:16.972715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:16.973028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:16.973069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:16.991368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:16.991864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:16.992005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:16.992110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:17.001300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:17.001365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:17.001613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:17.001624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:17.001650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:17.001658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:17.001664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:17.001705Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.070123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.070506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Finished: true CreateTxId: 281474976710675 CreateStep: 5000031 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_hour" PathId: 36 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710691 CreateStep: 5000018 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_minute" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710664 CreateStep: 5000007 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_hour" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710669 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_minute" PathId: 28 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710683 CreateStep: 5000025 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_hour" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710692 CreateStep: 5000016 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_minute" PathId: 25 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710680 CreateStep: 5000030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_hour" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710670 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_minute" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710671 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_hour" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710665 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_minute" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000023 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:17.327279Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:684:2673] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:23:17.332021Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:23:17.332141Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:17.332172Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 36us result status StatusSuccess 2025-10-24T16:23:17.332240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-10-24T16:23:17.332288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:17.332305Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 19us result status StatusSuccess 2025-10-24T16:23:17.332350Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710662 CreateStep: 5000011 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:17.332383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/query_metrics_one_minute" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:17.332396Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/query_metrics_one_minute" took 14us result status StatusSuccess 2025-10-24T16:23:17.332430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/query_metrics_one_minute" PathDescription { Self { Name: "query_metrics_one_minute" PathId: 34 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710689 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "query_metrics_one_minute" Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 34 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> Viewer::JsonAutocompleteColumns [GOOD] >> Viewer::QueryExecuteScript [GOOD] >> Viewer::Plan2SvgOK |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:16.720642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:16.720879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.720885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:16.720893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:16.720900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:16.720904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:16.721135Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.721149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:16.721715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:16.722415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:16.871618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:16.871647Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:16.895365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:16.895404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:16.896436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:16.900700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:16.900842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:16.900935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:16.901497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:16.903706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:16.903749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:16.904123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:16.904135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:16.904159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:16.904167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:16.904172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:16.904198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.059327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.059754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6:23:17.407915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2025-10-24T16:23:17.407919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-10-24T16:23:17.407923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2025-10-24T16:23:17.407926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2025-10-24T16:23:17.407930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2025-10-24T16:23:17.407933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2025-10-24T16:23:17.407937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-10-24T16:23:17.407940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-10-24T16:23:17.407944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-10-24T16:23:17.407947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-10-24T16:23:17.407951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-10-24T16:23:17.407955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-10-24T16:23:17.407959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-10-24T16:23:17.407962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-10-24T16:23:17.407967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-10-24T16:23:17.407971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-10-24T16:23:17.407974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-10-24T16:23:17.407978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-10-24T16:23:17.407981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-10-24T16:23:17.407984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-10-24T16:23:17.407988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-10-24T16:23:17.407991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-10-24T16:23:17.407995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-10-24T16:23:17.407999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-10-24T16:23:17.408003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-10-24T16:23:17.408007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-10-24T16:23:17.408010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-10-24T16:23:17.408014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-10-24T16:23:17.408017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-10-24T16:23:17.408035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2092: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2152: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2210: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2296: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2362: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, 
read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.408395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.413027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:17.420698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:17.420717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:17.420756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:17.420765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:17.420770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:17.420787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:745:2731] sender: [1:805:2058] recipient: [1:15:2062] 2025-10-24T16:23:17.492343Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:17.492399Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 75us result status StatusPathDoesNotExist 2025-10-24T16:23:17.492434Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:16.220252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:16.220482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.220488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:16.220496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:16.220503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:16.220508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:16.220517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.220532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:16.221198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:16.221876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:16.431842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:16.436073Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:16.476489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:16.476535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:16.477631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:16.504745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:16.508085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:16.508673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:16.508770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:16.513906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:16.513964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:16.514528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:16.514541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:16.514571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:16.514581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:16.514588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:16.514621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:16.940660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940839Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.940900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.941676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.942506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.942517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.942526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.942535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.942544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:16.942553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ration.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:19.109350Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:23:19.109361Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:19.109365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:19.109369Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:19.109372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:19.109376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-10-24T16:23:19.109381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:19.109385Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:23:19.109389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:23:19.109399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:19.109404Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-10-24T16:23:19.109408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-10-24T16:23:19.109411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-10-24T16:23:19.109521Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:19.109532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:19.109536Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:19.109540Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-10-24T16:23:19.109544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:19.109656Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:19.109666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:19.109670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:19.109676Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:23:19.109680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:19.109688Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:23:19.110266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:19.110308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-10-24T16:23:19.110362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:19.110369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-10-24T16:23:19.110384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:19.110388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:19.110453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:19.110473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:19.110478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:329:2318] 2025-10-24T16:23:19.110509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:19.110521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:19.110525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:329:2318] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:19.110581Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:19.110604Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 32us result status StatusSuccess 2025-10-24T16:23:19.110702Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:19.110754Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:19.110772Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 20us result status StatusSuccess 2025-10-24T16:23:19.110814Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:16.691527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:16.691734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.691740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:16.691747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:16.691754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:16.691759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:16.691963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:16.691978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:16.692547Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:16.693117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:16.884634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:16.884663Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:16.912477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:16.912515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:16.912576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:16.917936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:16.918084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:16.918190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:16.918266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:16.919048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:16.919087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:16.919299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:16.919307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:16.919331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:16.919338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:16.919344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:16.919366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:17.107537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.107808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.108619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.109384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.109403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.109414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.109425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.109435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:23:17.109446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
: 72057594046678944 2025-10-24T16:23:18.794112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:18.794119Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_sysview.cpp:43: [72057594046678944] TDropSysView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2025-10-24T16:23:18.794138Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-10-24T16:23:18.794156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-10-24T16:23:18.794164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:23:18.794716Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:18.794722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:18.794767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-10-24T16:23:18.794784Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:18.794789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:23:18.794793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-10-24T16:23:18.794820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:18.794825Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:23:18.794834Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:18.794837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:18.794841Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:18.794845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:18.794848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-10-24T16:23:18.794854Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:18.794858Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:23:18.794862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:23:18.794871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-10-24T16:23:18.794877Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-10-24T16:23:18.794881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2025-10-24T16:23:18.794884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-10-24T16:23:18.794970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:18.794989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:18.794993Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:18.794997Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2025-10-24T16:23:18.795000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-10-24T16:23:18.795094Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:18.795105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:18.795109Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:18.795112Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-10-24T16:23:18.795116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 38] was 1 2025-10-24T16:23:18.795125Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:23:18.795146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:18.795152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-10-24T16:23:18.795159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-10-24T16:23:18.796139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:18.796646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:18.796664Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2025-10-24T16:23:18.796707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:18.796712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:23:18.796723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:18.796726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:23:18.796776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:18.796795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:18.796799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:727:2716] 2025-10-24T16:23:18.796814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:18.796825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:18.796828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:727:2716] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:18.796879Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:18.796899Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 26us result status StatusPathDoesNotExist 2025-10-24T16:23:18.796930Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> Viewer::Plan2SvgOK [GOOD] >> Viewer::Plan2SvgBad ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2025-10-24T16:23:11.999507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:12.176826Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:12.176933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:12.176988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:12.282783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:12.282843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:12.287226Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:12.287755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322989598562 != 1761322989598566 2025-10-24T16:23:12.324061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7269, node 1 TClient is connected to server localhost:5146 2025-10-24T16:23:12.713554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:12.713577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:12.713581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:12.713866Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:13.779372Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:13.780760Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:13.781653Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:13.781716Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:13.781724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:13.882535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:13.882579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:13.886622Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:13.887138Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1761322993156139 != 1761322993156143 2025-10-24T16:23:13.923028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7136, node 2 TClient is connected to server localhost:5454 2025-10-24T16:23:14.075755Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:14.075776Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:14.075780Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:14.075931Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:15.473343Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:15.476658Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:15.476733Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:15.476746Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:15.540720Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:15.540764Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:15.544868Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:15.545722Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:34:2081] 1761322994536605 != 1761322994536608 2025-10-24T16:23:15.580508Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24310, node 3 TClient is connected to server localhost:62247 2025-10-24T16:23:15.711772Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:15.711789Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:15.711794Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:15.716356Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} 2025-10-24T16:23:16.945049Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:16.947817Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:16.947836Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:16.947899Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:17.070156Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:17.070203Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:17.075512Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:17.076297Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:34:2081] 1761322996238671 != 1761322996238675 2025-10-24T16:23:17.111028Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29719, node 4 TClient is connected to server localhost:25698 2025-10-24T16:23:17.229355Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:17.229373Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:17.229377Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:17.229502Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"id","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"orders"}]},"Version":2} 2025-10-24T16:23:18.503518Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:18.509529Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:18.510518Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:18.510586Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:18.510594Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:18.588729Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:18.588769Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:18.592605Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:18.593032Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:34:2081] 1761322997774157 != 1761322997774161 2025-10-24T16:23:18.628486Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7617, node 5 TClient is connected to server localhost:21712 2025-10-24T16:23:18.739200Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:18.739218Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:18.739222Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:18.739342Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] Test command err: 2025-10-24T16:23:17.184039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:17.441752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:17.441829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:17.441861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:17.831390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:17.831680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:17.847472Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:17.853659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322992392420 != 1761322992392424 2025-10-24T16:23:17.896384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6414, node 1 TClient is connected to server localhost:6331 2025-10-24T16:23:18.524730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:18.524751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:18.524756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:18.524870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |53.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] Test command err: 2025-10-24T16:23:16.371662Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824669279527361:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:16.376375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/000955/r3tmp/tmpDt0k9S/pdisk_1.dat 2025-10-24T16:23:16.910843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:16.913759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:16.913995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:17.050077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:17.051229Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:17.117558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64432 TServer::EnableGrpc on GrpcPort 13707, node 1 2025-10-24T16:23:17.381246Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:18.188198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:18.188212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:18.188214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:18.188268Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:19.145752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:23:19.161857Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7564824682164429767:2303] Handshake: worker# [1:7564824682164429765:2301] 2025-10-24T16:23:19.161950Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7564824682164429767:2303] Create read session: session# [1:7564824682164429768:2304] |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> TSchemeShardTestFailureInjection::TestCreateChangePathStateFailedFailure |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets |54.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> RemoteTopicReader::ReadTopic [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> Viewer::Plan2SvgBad [GOOD] |54.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |54.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |54.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::LS >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> TSchemeShardTestFailureInjection::TestCreateChangePathStateFailedFailure [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-10-24T16:23:15.952957Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824664731328587:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:15.953001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/000945/r3tmp/tmpKEp1cL/pdisk_1.dat 2025-10-24T16:23:16.336312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:16.336383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:16.337454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:16.375313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:16.383541Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:16.547842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4972 TServer::EnableGrpc on GrpcPort 4524, node 1 2025-10-24T16:23:16.987600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:17.404857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:17.404869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:17.404871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:17.404915Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:18.060649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:23:18.077103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:18.352208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:19.733374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824681911198643:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:19.733410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:19.733825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824681911198677:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:19.733831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824681911198678:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:19.733836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824681911198679:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:19.733918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:19.735580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:19.738422Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824681911198685:2449] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:19.744231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824681911198683:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:23:19.744243Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824681911198684:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:23:19.817484Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824681911198732:2480] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:19.829291Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824681911198750:2488] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:20.522352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:20.635511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:20.765383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-10-24T16:23:20.881522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-10-24T16:23:20.956632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824664731328587:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:20.956677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:23:20.982221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:21.251536Z node 1 
:REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Handshake: worker# [1:7564824677616231073:2301] 2025-10-24T16:23:21.253333Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Create read session: session# [1:7564824690501133927:2300] 2025-10-24T16:23:21.255949Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:23:21.259981Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_10042418204786985422_v1 } } 2025-10-24T16:23:21.266781Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-10-24T16:23:21.150000Z WriteTime: 2025-10-24T16:23:21.155000Z MessageGroupId: producer ProducerId: producer }] } } 2025-10-24T16:23:21.268399Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:23:21.314809Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7564824690501133926:2746] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-10-24T16:23:21.305000Z WriteTime: 2025-10-24T16:23:21.301000Z MessageGroupId: producer ProducerId: producer }] } } 2025-10-24T16:23:21.416035Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7564824690501134021:2779] Handshake: worker# [1:7564824677616231073:2301] 2025-10-24T16:23:21.417072Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7564824690501134021:2779] Create read session: session# [1:7564824690501134022:2300] 2025-10-24T16:23:21.419827Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564824690501134021:2779] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:23:21.440340Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7564824690501134021:2779] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_2_15275797664437325846_v1 } } 2025-10-24T16:23:21.459290Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7564824690501134021:2779] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-10-24T16:23:21.305000Z WriteTime: 2025-10-24T16:23:21.301000Z MessageGroupId: producer ProducerId: producer }] } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: Data has built Merge = 0.4186769249 Data has merged 2025-10-24T16:23:09.334888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824637987184856:2137];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:09.334955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:09.350040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:09.428248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:09.428273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:09.429257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:09.459974Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824637987184756:2081] 1761322989325179 != 1761322989325182 2025-10-24T16:23:09.466969Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25896, node 1 2025-10-24T16:23:09.565253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:09.587462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:09.587474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:09.587476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:09.587526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:23:09.652073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:09.656663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:23:09.665239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:09.666235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-10-24T16:23:09.667495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-10-24T16:23:09.667839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:09.668631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:10.147995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824642282152777:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.148025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.148316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824642282152789:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.148327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824642282152790:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.148346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.150694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:10.184019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824642282152793:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2025-10-24T16:23:10.269458Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824642282152844:2371] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:10.344167Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:11.388109Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:11.388234Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 1166, node 2 2025-10-24T16:23:11.424088Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:11.460082Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:11.460097Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:11.460100Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:11.460151Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:11.480446Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:11.480477Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:11.484217Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:23:11.544195Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables ex ... orizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.003144Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.076310Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.076343Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.129084Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:16.140413Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.140447Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.189307Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.189335Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.261281Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.261312Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.336165Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.336195Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.396838Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.396869Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.441794Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.441820Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.494698Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.494719Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.564207Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.564237Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.628178Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.628215Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.700177Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.700208Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.776419Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.776449Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.844367Z node 5 
:TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:16.844398Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:16.846266Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:16.846989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:16.850097Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:18.091552Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:18.091580Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:18.432150Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:18.432183Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:18.568484Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:18.568504Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:18.792286Z node 5 :RPC_REQUEST WARN: rpc_stream_execute_scan_query.cpp:410: Client lost 2025-10-24T16:23:18.792353Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [5:7564824678080473984:2606] TxId: 281474976710698. Ctx: { TraceId: 01k8bgg6xd1pk2qe2yx1s5r18q, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MTFkNzcxODAtYmViMzk2YTMtYjJiZjgwNzctMzMxMGMyY2U=, PoolId: default}. ABORTED: {
: Error: Client lost } 2025-10-24T16:23:18.792401Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=MTFkNzcxODAtYmViMzk2YTMtYjJiZjgwNzctMzMxMGMyY2U=, ActorId: [5:7564824678080473962:2606], ActorState: ExecuteState, TraceId: 01k8bgg6xd1pk2qe2yx1s5r18q, Create QueryResponse for error on request, msg: 2025-10-24T16:23:18.792492Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761322998827, txId: 281474976710697] shutting down 2025-10-24T16:23:18.792553Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564824678080473989:2612], TxId: 281474976710698, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgg6xd1pk2qe2yx1s5r18q. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=MTFkNzcxODAtYmViMzk2YTMtYjJiZjgwNzctMzMxMGMyY2U=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7564824678080473984:2606], status: ABORTED, reason: {
: Error: Terminate execution } 2025-10-24T16:23:19.445106Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7564824682350374971:2255];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:19.445150Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:19.461143Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:19.495269Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:19.495828Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [6:7564824682350374743:2081] 1761322999437490 != 1761322999437493 TServer::EnableGrpc on GrpcPort 14348, node 6 2025-10-24T16:23:19.552209Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:19.552222Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:19.552225Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:19.552279Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:19.564487Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:19.564522Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:19.568386Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23532 2025-10-24T16:23:19.697125Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:19.904454Z node 6 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (717F937C): Token is not supported test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:20.664581Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:20.664629Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:20.689582Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:20.689612Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:20.692211Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:20.694130Z node 7 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:20.695937Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:7564824686902399158:2081] 1761323000601181 != 1761323000601184 TServer::EnableGrpc on GrpcPort 11675, node 7 2025-10-24T16:23:20.780062Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:20.780073Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:20.780076Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:20.780125Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13086 2025-10-24T16:23:20.883983Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:21.326732Z node 7 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (717F937C): Token is not supported >> TSchemeShardSubDomainTest::CreateDropNbs >> Viewer::TabletMergingPacked [FAIL] >> Viewer::VDiskMerging >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:21.994118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:21.994142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:21.994147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:21.994153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:21.994158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:21.994162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:21.994170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:21.994182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:21.995146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:21.995973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:22.111526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:22.111551Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:22.114801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:22.114837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:22.114885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:22.116577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:22.116796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:22.116930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.117023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:22.118021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.118073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:22.118351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.118363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.118389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:22.118397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:22.118404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:22.118429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.119978Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:22.199700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:22.199762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.199813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:22.199820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:22.202319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:22.202333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:22.206848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.206897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:22.206952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.206961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:22.206966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:22.206971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:22.208886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.208897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:22.208902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:22.209716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.209728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.209739Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.209744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:22.212795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:22.220085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:22.221216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:22.221387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.221410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:22.221417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.221461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:22.221466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.221492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:22.221501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:22.222328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.222335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ard.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:22.274956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.274960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-10-24T16:23:22.274965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-10-24T16:23:22.275041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.275048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-10-24T16:23:22.275060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:22.275067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:22.275071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:22.275074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:22.275078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-10-24T16:23:22.275083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:22.275087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 100:0 2025-10-24T16:23:22.275092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 100:0 2025-10-24T16:23:22.275106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:22.275111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-10-24T16:23:22.275116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:22.275119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:22.275201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:22.275210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:22.275214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:22.275218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:22.275221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:22.275268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:22.275275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:22.275279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:22.275282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:22.275287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:22.275294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-10-24T16:23:22.290147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:22.290199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-10-24T16:23:22.290252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:22.290261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-10-24T16:23:22.290338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:22.290365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:22.290371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:314:2303] TestWaitNotification: OK eventTxId 100 2025-10-24T16:23:22.290464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:22.290509Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 61us result status StatusSuccess 2025-10-24T16:23:22.290647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:22.290724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:22.290744Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2025-10-24T16:23:22.290815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDefine >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_failure_injection/unittest >> TSchemeShardTestFailureInjection::TestCreateChangePathStateFailedFailure [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:21.724355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:21.724387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:21.724392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:21.724400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:21.724407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:21.724411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:21.724420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:21.724436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:21.725078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:21.725987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:21.812081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:21.812112Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:21.835840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:21.835904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:21.835953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:21.841143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:21.841543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:21.841848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:21.841934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:21.843523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:21.843575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:21.843827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:21.843836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:21.843872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:21.843882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:21.843888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:21.843916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:21.847513Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:21.929229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:21.929344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:21.929424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:21.929433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:21.929510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:21.929525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:21.930627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:21.930693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:21.930769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:21.930793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:21.930800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:21.930806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:21.931276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:21.931287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:21.931294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:21.931645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:21.931655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:21.931662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:21.931671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:21.932467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:21.932882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:21.932931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:21.933154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:21.933180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:21.933188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:21.933262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:21.933269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:21.933305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:21.933318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:21.933727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:21.933737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 110, path id: 10 2025-10-24T16:23:22.556436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 110, path id: 11 2025-10-24T16:23:22.556589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.556605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1091: NTableState::TProposedWaitParts operationId# 110:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 110 2025-10-24T16:23:22.557859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:23:22.557890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 5 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:23:22.557896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 110 2025-10-24T16:23:22.557904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 5 2025-10-24T16:23:22.557914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-10-24T16:23:22.558038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:23:22.558052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 11 Version: 3 PathOwnerId: 72057594046678944, cookie: 110 2025-10-24T16:23:22.558056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 110 2025-10-24T16:23:22.558061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 11], version: 3 2025-10-24T16:23:22.558066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 4 2025-10-24T16:23:22.558077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 110, ready parts: 0/1, is published: true 2025-10-24T16:23:22.558840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6705: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 110 Step: 5000011 OrderId: 110 ExecLatency: 0 
ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 814 } } CommitVersion { Step: 5000011 TxId: 110 } 2025-10-24T16:23:22.558859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2025-10-24T16:23:22.558892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 110 Step: 5000011 OrderId: 110 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 814 } } CommitVersion { Step: 5000011 TxId: 110 } 2025-10-24T16:23:22.558911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 110 Step: 5000011 OrderId: 110 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 814 } } CommitVersion { Step: 5000011 TxId: 110 } 2025-10-24T16:23:22.559966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 783 RawX2: 4294970022 } Origin: 72075186233409549 State: 2 TxId: 110 Step: 0 Generation: 2 2025-10-24T16:23:22.559981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2025-10-24T16:23:22.560002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: Source { RawX1: 783 RawX2: 4294970022 } Origin: 72075186233409549 State: 2 TxId: 110 Step: 0 Generation: 2 2025-10-24T16:23:22.560016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 110:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:23:22.560025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 110:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 783 RawX2: 4294970022 } Origin: 72075186233409549 State: 2 TxId: 110 Step: 0 Generation: 2 2025-10-24T16:23:22.560042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 110:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.560047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.560053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 110:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-10-24T16:23:22.560062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 110:0 129 -> 
240 2025-10-24T16:23:22.560634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-10-24T16:23:22.561239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-10-24T16:23:22.561306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.562278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.562352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.562362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 110:0 ProgressState 2025-10-24T16:23:22.562383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 1/1 2025-10-24T16:23:22.562389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:23:22.562395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#110:0 progress is 1/1 2025-10-24T16:23:22.562399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:23:22.562404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: true 2025-10-24T16:23:22.562420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:307:2296] message: TxId: 110 2025-10-24T16:23:22.562428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-10-24T16:23:22.562435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 110:0 2025-10-24T16:23:22.562443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 110:0 2025-10-24T16:23:22.562470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-10-24T16:23:22.568118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-10-24T16:23:22.568139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:755:2701] TestWaitNotification: OK eventTxId 110 TestModificationResults wait txId: 111 TestModificationResult got TxId: 110, wait until txId: 111 TestModificationResults wait txId: 111 2025-10-24T16:23:22.570304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/" OperationType: ESchemeOpRestoreBackupCollection RestoreBackupCollection { Name: "FailInj_TestBackup_1252246181752200223" } } TxId: 111 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:22.570411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 111:0, explain: Create change path state failed, at schemeshard: 72057594046678944 2025-10-24T16:23:22.570423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 111:1, propose status:StatusMultipleModifications, reason: Create change path state failed, at schemeshard: 72057594046678944 2025-10-24T16:23:22.571404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 111, response: Status: StatusMultipleModifications Reason: "Create change path state failed" TxId: 111 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:22.571479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 111, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Create change path state failed, operation: RESTORE, path: /MyRoot/.backups/collections//FailInj_TestBackup_1252246181752200223 TestModificationResult got TxId: 111, wait until txId: 111 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::LS [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:22.423908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:22.423932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:22.423938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:22.423945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:22.423951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-10-24T16:23:22.423955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:22.423964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:22.423975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:22.424079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:22.424132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:22.538050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:22.538074Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:22.553625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:22.553720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:22.554922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:22.570140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:22.570218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:22.570546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.570850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:22.571964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.572012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:22.572236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.572244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.572261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:22.572268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:22.572273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:22.572287Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.576541Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:23:22.692600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:22.692668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.692724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:22.692731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:22.694504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:22.694544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:22.697211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.697267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:22.697334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.697347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:22.697353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:22.697359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:22.698132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.698146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:22.698151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for 
txid 1:0 3 -> 128 2025-10-24T16:23:22.699012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.699023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.699034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.699040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:22.702486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:22.703733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:22.703770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:22.703986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.704013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:22.704020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.704558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:22.704567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.704862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:22.704875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:22.707083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.707094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme ... ionPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.269606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.269612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-10-24T16:23:23.269620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-10-24T16:23:23.271614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.271918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2025-10-24T16:23:23.382859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-10-24T16:23:23.382909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-10-24T16:23:23.382919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-10-24T16:23:23.382932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.382936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-10-24T16:23:23.382940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-10-24T16:23:23.383842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-10-24T16:23:23.383885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-10-24T16:23:23.383893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers 
early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-10-24T16:23:23.383902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.383906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-10-24T16:23:23.383952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-10-24T16:23:23.383981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:23.383992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-10-24T16:23:23.384436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.385139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.385178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.385186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.385224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:23.385256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.385261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:341:2316], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-10-24T16:23:23.385265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:341:2316], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-10-24T16:23:23.385426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.385433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-10-24T16:23:23.385445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:23.385449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:23.385453Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:23.385456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:23.385460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-10-24T16:23:23.385465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:23.385471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:23:23.385476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:23:23.385504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-10-24T16:23:23.385510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-10-24T16:23:23.385514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:23:23.385517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-10-24T16:23:23.386039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:23.386055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:23.386061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:23:23.386066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:23:23.386071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:23:23.386713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:23.386724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:23.386727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:23:23.386731Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:23:23.386735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-10-24T16:23:23.386746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-10-24T16:23:23.388395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:23:23.391893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] |54.0%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.129306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.129338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.129344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.129352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.129360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.129365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.129375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.129391Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.129515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.129590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.171773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.171800Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.192985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.193027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.193957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.198163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.198492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.198816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.199087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.200998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.201052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.202022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.202032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.202057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.202066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.202073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.202099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.204922Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.299997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.300091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.300160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.300169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.302311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.302330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.303403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.303460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.303516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.303527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.303532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.303537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.304130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.304141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.304146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.304923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.304932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:23:23.304945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.304951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.308292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.310191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.311063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.311306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.311333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.311341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.311402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.311409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.311446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.311458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.312403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.312414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ange state for txid 101:0 128 -> 240 2025-10-24T16:23:23.404325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-10-24T16:23:23.404356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.404365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-10-24T16:23:23.404372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-10-24T16:23:23.406506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.406516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.406577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:23.406593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.406598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-10-24T16:23:23.406603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:23:23.406695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.406702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:23:23.406715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:23.406721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.406727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:23.406731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.406736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:23.406742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.406747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:23.406752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:23.407817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-10-24T16:23:23.407827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:23.407832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:23.407835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:23.407969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.407984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.407990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:23.407996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:23.408001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:23.408102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.408111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.408117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:23.408121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:23.408124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-10-24T16:23:23.408132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 
2025-10-24T16:23:23.409625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:23.409995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-10-24T16:23:23.410068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:23.410076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-10-24T16:23:23.410089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:23.410092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:23.410152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:23.410173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.410178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:599:2515] 2025-10-24T16:23:23.410196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:23.410221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.410224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:599:2515] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:23.410288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.410326Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 48us result status StatusSuccess 2025-10-24T16:23:23.410445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.258539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.258564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.258569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.258576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.258582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.258587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.258596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.258608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.258718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.258768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.302522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.302551Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.323163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.323199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.324045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.330136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.330564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.330887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.330977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.334274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.334334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.335224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.335235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.335256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.335263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.335268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.335291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.340802Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.434545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.434600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.434641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.434646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.436830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.436844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.437752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.437791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.437832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.437839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.437843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.437847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.438335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.438344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.438349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.439067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.439076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.439085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.439091Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.441714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.443522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.444508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.444668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.444688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.444694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.444737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.444743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.444765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.444774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.445653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.445660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
hemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:23.515510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.515514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-10-24T16:23:23.515519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-10-24T16:23:23.515553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.515558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-10-24T16:23:23.515568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:23.515571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:23.515577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:23.515580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:23.515585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-10-24T16:23:23.515590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:23.515594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 100:0 2025-10-24T16:23:23.515598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 100:0 2025-10-24T16:23:23.515621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:23:23.515627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-10-24T16:23:23.515632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:23.515635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:23.515733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.515743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 100 2025-10-24T16:23:23.515747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:23.515751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:23.515755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:23.515823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.515830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.515833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:23.515836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:23.515840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:23.515858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-10-24T16:23:23.517000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:23.517016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-10-24T16:23:23.517054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:23.517059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-10-24T16:23:23.517098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:23.517112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.517116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:456:2411] TestWaitNotification: OK eventTxId 100 2025-10-24T16:23:23.517174Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.517198Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 31us result status StatusSuccess 2025-10-24T16:23:23.517293Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.517360Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.517371Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2025-10-24T16:23:23.517416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 
100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAndRestart >> TSchemeShardSubDomainTest::SimultaneousDeclare ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.480890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.480911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.480916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.480921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.480927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.480930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.480938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.480949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.481814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.482586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.596659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.596684Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.604981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.605036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.605517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.608393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.608906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.609184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.609471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.611557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.611602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.612325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.612341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.612363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.612372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.612378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.612400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.614648Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.671649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.671714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.671763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.671770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.674111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.674133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.676889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.676937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.676993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.677001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.677006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.677011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.680244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.680686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-10-24T16:23:23.680714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.681440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.681797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.681839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.682031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.682054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.682060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.682105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.682111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.682136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.682146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.682477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.682496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
3.838638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:23.838641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:23.838727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:23.839085Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-10-24T16:23:23.839340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.839389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:23:23.839757Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-10-24T16:23:23.839824Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-10-24T16:23:23.839844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:23.839903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:23.839966Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-10-24T16:23:23.839988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:23.840005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:23.840108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at 
schemeshard: 72057594046678944 2025-10-24T16:23:23.840123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-10-24T16:23:23.840289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:23.840293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:23.840302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-10-24T16:23:23.840427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:23.840434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:23.840453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:23.840479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:23.840539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:23.840928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:23.840937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:23.841968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:23.841976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:23.841986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:23.841989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:23.841997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:23:23.842001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-10-24T16:23:23.842028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:23.842039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:23.842047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:23.842051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:23.842063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.848143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:23:23.848205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:23.848211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:23.848274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:23.848289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.848293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:543:2497] TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:23.849812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.849848Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 49us result status StatusPathDoesNotExist 2025-10-24T16:23:23.849884Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:23.849947Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.849958Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 12us result status StatusPathDoesNotExist 2025-10-24T16:23:23.849971Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.669541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.669562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.669567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.669573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.669579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.669583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.669591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.669604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.669698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.669741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.773541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.773560Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.776349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.776378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.776421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.777668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.777821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.777938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.778013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.778852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.778898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.779122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.779133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.779152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.779159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.779164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.779182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.781370Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.803549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.803624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.803685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.803694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.803774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.803791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.804448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.804501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.804555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.804565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.804571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.804577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.804979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.804991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.804996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.805327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.805339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.805351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-10-24T16:23:23.805359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.806012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.806433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.806487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.806711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.806739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.806748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.806807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.806814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.806845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.806857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.807261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.807269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-10-24T16:23:23.858801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.858811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.858848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:23.858867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.858873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-10-24T16:23:23.858880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-10-24T16:23:23.858964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.858973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-10-24T16:23:23.858988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:23.858993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:23.858998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:23.859002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:23.859008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-10-24T16:23:23.859014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:23.859021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 100:0 2025-10-24T16:23:23.859027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 100:0 2025-10-24T16:23:23.859095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-10-24T16:23:23.859104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-10-24T16:23:23.859112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:23.859115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:23.859248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.859263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.859268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:23.859274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:23.859279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:23.859433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.859448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:23.859452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:23.859457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:23.859461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-10-24T16:23:23.859472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-10-24T16:23:23.859478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:570:2486] 2025-10-24T16:23:23.864058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:23.876268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:23.876310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.876315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:571:2487] TestWaitNotification: OK eventTxId 100 2025-10-24T16:23:23.876441Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.876494Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 60us result status StatusSuccess 2025-10-24T16:23:23.876616Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-10-24T16:23:23.878008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.878037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.878060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, 
reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2025-10-24T16:23:23.878770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.878812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.864849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.864872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.864878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.864885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.864892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.864898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.864908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.864925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.865039Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.865089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.887438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.887457Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.890528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.890568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.890610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.891698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.891842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.891987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.892847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.893129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.893138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.893167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.893175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.893181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.893204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.894540Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.916946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:23.917025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.917079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.917086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.917163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.917179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.917739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.917794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.917842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.917851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.917857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.917861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.918202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.918507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.918547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.919227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.919609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.919650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.919865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.919889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.919896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.919948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.919954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.919978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.919989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.920367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.920375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
16:23:23.940842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-10-24T16:23:23.940848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 3 -> 128 2025-10-24T16:23:23.941289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.941690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.941711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.941730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.941736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.941742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-10-24T16:23:23.941749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.941778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.942079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-10-24T16:23:23.942101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-10-24T16:23:23.942171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.942190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.942196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-10-24T16:23:23.942260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 
101:0 128 -> 240 2025-10-24T16:23:23.942268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-10-24T16:23:23.942294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:23.942307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-10-24T16:23:23.942710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.942718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:23.942768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.942774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:23:23.942785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.942792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:23:23.942803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:23.942808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.942813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:23.942816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.942821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:23.942826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.942832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:23.942836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:23.942863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:23:23.942870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-10-24T16:23:23.942898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-10-24T16:23:23.943058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.943071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.943076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:23.943081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-10-24T16:23:23.943086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:23.943098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-10-24T16:23:23.943103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:313:2302] 2025-10-24T16:23:23.943614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:23.943633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.943639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:320:2309] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:23.943737Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.943770Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 39us result status StatusSuccess 2025-10-24T16:23:23.943887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.503874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.503905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.503912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.503921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.503928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.503933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.503945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.503961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.504087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-10-24T16:23:23.504149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.520771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.520795Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.536306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.536340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.536394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.554787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.555882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.556410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.556487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.558798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.558861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.559569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.559579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.559605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.559614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.559620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.559650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.562498Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.666315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.666378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.666426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.666434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.666493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.666505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.667902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.667958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.668003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.668011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.668015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.668020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.668733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.668744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.668749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.669130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.669140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.669151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.669165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.669784Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.670521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.671322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.671506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.671528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.671535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.671584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.671590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.671615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.671624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.671964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.671971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:23:23.672491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.672501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:23:23.672912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:23:23.672987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-10-24T16:23:23.673120Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:276:2265] Bootstrap 2025-10-24T16:23:23.673935Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:276:2265] Become StateWork (SchemeCache [1:281:2270]) 2025-10-24T16:23:23.675824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.675868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.675886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-10-24T16:23:23.676082Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:23:23.677827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.678079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-10-24T16:23:23.678183Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-10-24T16:23:23.678999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" 
} StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.679027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.679040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-10-24T16:23:23.679688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.679733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-10-24T16:23:23.679800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:23.679808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-10-24T16:23:23.679821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:23.679824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:23.679891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:23.679913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.679917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:295:2284] 2025-10-24T16:23:23.679937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:23.679961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.679964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:295:2284] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:23.680025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680050Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 
72057594046678944 describe path "/MyRoot/USER_1" took 31us result status StatusPathDoesNotExist 2025-10-24T16:23:23.680085Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680156Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 12us result status StatusPathDoesNotExist 2025-10-24T16:23:23.680169Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.680229Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 18us result status StatusSuccess 2025-10-24T16:23:23.680306Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.869997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.870023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.870029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.870038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.870046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.870051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.870061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.870076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.870200Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.870254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.886982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.887006Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.890363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.890397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.890448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.891731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.891896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.892006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.892973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.893010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.893224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.893234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.893258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.893265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.893272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.893292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.894548Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.916946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-10-24T16:23:23.917027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.917079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.917086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.917165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.917180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.917769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.917815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.917862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.917871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.917876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.917884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.918277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.918660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.918689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.919405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.919814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.919874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.920060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.920086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.920093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.920142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.920150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.920173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.920184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.920602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.920611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
TICE: schemeshard__init.cpp:1819: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.952817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-10-24T16:23:23.952850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2092: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.952871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2152: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.952887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2210: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:23.952894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:23.952899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:23.952903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:23.952918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2296: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.952928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2362: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.952971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 
0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.953364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.954956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.955517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.955534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.955545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.955555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.955563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.955605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-10-24T16:23:24.028044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:24.028069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:460:2412] sender: [1:522:2058] recipient: [1:15:2062] 2025-10-24T16:23:24.028312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:24.028355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:24.028361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:520:2458] TestWaitNotification: OK eventTxId 100 2025-10-24T16:23:24.028476Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:24.028525Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 64us result status StatusSuccess 2025-10-24T16:23:24.028674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:24.028777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:24.028797Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-10-24T16:23:24.028873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.356525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.356552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.356558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.356565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.356572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.356576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.356587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.356601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-10-24T16:23:23.357365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.357902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.500863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.500898Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.511772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.511810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.511880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.525404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.526364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.526659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.526953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.528761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.528805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.529555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.529567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.529588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.529597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.529602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.529619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.532037Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.610856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.610920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.610970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.610977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.612591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.612617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.616876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.616956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.617035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.617051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.617057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.617064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.617564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.617575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.617580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.617893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.617902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.617913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.617920Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.618616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.619008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.619057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.619252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.619276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.619283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.619339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.619345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.619375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.619387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.619782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.619790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.891165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, at schemeshard: 72057594046678944 2025-10-24T16:23:23.891592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-10-24T16:23:23.891647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp:155, operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-10-24T16:23:23.891757Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.891799Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 45us result status StatusSuccess 2025-10-24T16:23:23.891920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 
0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.891994Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892026Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 32us result status StatusSuccess 2025-10-24T16:23:23.892142Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892237Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 17us result status StatusSuccess 2025-10-24T16:23:23.892280Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892337Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.892351Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 16us result status StatusSuccess 2025-10-24T16:23:23.892399Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> Viewer::FloatPointJsonQuery [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.851086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.851110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.851116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.851125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.851133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.851138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-10-24T16:23:23.851149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.851165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.851302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.851358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.869343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.869365Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.872847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.872880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.872931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.874193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.874361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.874502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.874603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.875489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.875541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.875791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.875801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.875829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.875838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.875845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.875889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.877275Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.900543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.900626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.900701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.900710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.900789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.900806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.904228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.904307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.904387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.904403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.904410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.904417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.904999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.905015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.905021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.905406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:23:23.905417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.905429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.905437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.906161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.906600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.906652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.906865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.906893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.906902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.906958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.906966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.906995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.907008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.907447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.907457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
perationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.918280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:23:23.918293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:23.918298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.918304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:23.918308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.918314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:23.918320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:23.918325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:23.918330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:23.918342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:23.918351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:23.918356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-10-24T16:23:23.918361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-10-24T16:23:23.918479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.918491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.918497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:23.918502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-10-24T16:23:23.918507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:23.918672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 
Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.918685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:23.918690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:23.918695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:23.918700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:23.918709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:23.919274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:23.919468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-10-24T16:23:23.919538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:23.919547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-10-24T16:23:23.919564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:23.919568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:23.919646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:23.919667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:23.919675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.919679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:342:2331] 2025-10-24T16:23:23.919693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.919696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:342:2331] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:23.919760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.919785Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 32us result status StatusSuccess 2025-10-24T16:23:23.919906Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.919995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.920018Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 25us result status StatusSuccess 2025-10-24T16:23:23.920074Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } 
ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:22.876162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:22.876183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:22.876189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:22.876195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:22.876200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:22.876204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:22.876214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:22.876225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:22.877045Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:22.878048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:22.958681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:22.958698Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:22.961877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:22.961902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:22.961945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:22.962969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:22.963100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:22.963203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.963260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:22.963978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.964018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:22.964217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.964225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.964249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:22.964256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:22.964262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:22.964281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.965298Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:22.984675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:22.984733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.984783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:22.984790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:22.986196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:22.986213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:22.987204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.987251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:22.987301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.987309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:22.987315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:22.987319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:22.987857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.987867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:22.987871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:22.988532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.988540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.988552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.988558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:22.991759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:22.993355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:22.994328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:22.994534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.994561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:22.994569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.994621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:22.994628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.994658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:22.994669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:22.995482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.995489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:24.146207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:24.146217Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-10-24T16:23:24.146280Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 108:0 128 -> 129 2025-10-24T16:23:24.146333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:24.146348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:23:24.146922Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:24.146936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:24.146990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:23:24.147017Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:24.147022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:343:2318], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-10-24T16:23:24.147028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:343:2318], at schemeshard: 72057594046678944, txId: 108, path id: 5 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-10-24T16:23:24.147192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.147200Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-10-24T16:23:24.147210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-10-24T16:23:24.147361Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:23:24.147374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:23:24.147379Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-10-24T16:23:24.147385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-10-24T16:23:24.147390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:23:24.147592Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:23:24.147606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:23:24.147611Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-10-24T16:23:24.147616Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-10-24T16:23:24.147621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:23:24.147636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-10-24T16:23:24.148178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-10-24T16:23:24.148292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-10-24T16:23:24.148560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-10-24T16:23:24.160707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6566: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-10-24T16:23:24.160733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-10-24T16:23:24.160761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-10-24T16:23:24.160776Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 108:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 108 2025-10-24T16:23:24.161313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.161362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.161372Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2025-10-24T16:23:24.161391Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#108:0 progress is 1/1 2025-10-24T16:23:24.161396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:23:24.161403Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#108:0 progress is 1/1 2025-10-24T16:23:24.161407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:23:24.161415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-10-24T16:23:24.161432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:501:2450] message: TxId: 108 2025-10-24T16:23:24.161441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:23:24.161448Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 108:0 2025-10-24T16:23:24.161453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 108:0 2025-10-24T16:23:24.161488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:23:24.161925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-10-24T16:23:24.161938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:896:2806] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-10-24T16:23:24.163096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:24.163149Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.163255Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:24.163776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:24.163833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-10-24T16:23:24.163942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-10-24T16:23:24.163950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-10-24T16:23:24.164049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-10-24T16:23:24.164073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-10-24T16:23:24.164078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:931:2841] TestWaitNotification: OK eventTxId 109
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.108598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.108627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.108632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.108638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.108644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.108647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.108656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.108666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.109207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048:
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.109509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.214963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.214990Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.264314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.264355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.265114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.300507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.301299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.301688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.301817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.316733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.316806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.317792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.317811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.317839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.317849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.317856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.317883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.329325Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.523620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.523695Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.523758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.523766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.525709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.525731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.527709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.527767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.527826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.527837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.527842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.527859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.528562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.528576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.528582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.529476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.529487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.529498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.529505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-10-24T16:23:23.532546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.533994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.534843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.535036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.535069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.535078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.535135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.535143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.535170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.535181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.540238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.540249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
teTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 2025-10-24T16:23:23.864125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-10-24T16:23:23.864193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:23.864654Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:23:23.864694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.864731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409550 2025-10-24T16:23:23.865374Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-10-24T16:23:23.865431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186233409546 2025-10-24T16:23:23.865750Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-10-24T16:23:23.865966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:23.866003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:23.866260Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-10-24T16:23:23.866297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-10-24T16:23:23.866319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-10-24T16:23:23.866588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:23:23.866614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2025-10-24T16:23:23.866720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-10-24T16:23:23.866841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:23.866849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:23.866862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:23.866981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:23.866989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:23.867014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:23.867802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-10-24T16:23:23.867819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-10-24T16:23:23.867836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:23.867841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:23.868374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:23.868386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:23.868415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:23.868420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:23.868431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:23:23.868437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-10-24T16:23:23.868515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:23.868554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:23.868559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:23.868574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.868956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:23.869302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-10-24T16:23:23.869396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-10-24T16:23:23.869405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-10-24T16:23:23.869505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-10-24T16:23:23.869527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:23:23.869533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:908:2800] TestWaitNotification: OK eventTxId 106 2025-10-24T16:23:23.869647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:23.869699Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 66us result status StatusSuccess 2025-10-24T16:23:23.869823Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|54.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
|54.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
|54.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log}
|54.1%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
>> TSchemeShardSubDomainTest::Restart
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD]
>> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:25.016728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:25.016750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:25.016756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:25.016762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:25.016769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing
config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:25.016774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:25.016783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:25.016795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:25.016888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:25.016940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:25.030400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:25.030422Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:25.033116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:25.033141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:25.033193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:25.034389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:25.034535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:25.034650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.034725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:25.035408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.035455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:25.035676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:25.035685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.035708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:25.035717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:25.035723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:25.035746Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.036849Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:25.096731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:25.096807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.096872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:25.096879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:25.099671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:25.099688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:25.100952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.101006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:25.101069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.101078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:25.101084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:25.101089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:25.101666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.101675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:25.101680Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:25.102499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.102507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.102518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:25.102525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:25.106113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:25.108035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:25.108699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:25.108903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.108926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:25.108933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:25.108993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:25.109000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:25.109031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:25.109043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:25.109377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-10-24T16:23:25.109384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 025-10-24T16:23:25.686352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.686411Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 70us result status StatusSuccess 2025-10-24T16:23:25.688683Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:25.688793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.688822Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 32us result status StatusSuccess 2025-10-24T16:23:25.688902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:25.688960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.688972Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 13us result status StatusSuccess 2025-10-24T16:23:25.689016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { 
Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:25.689054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.689067Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 14us result status StatusSuccess 2025-10-24T16:23:25.689117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:24.125470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:24.125496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:24.125502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:24.125511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:24.125519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:24.125523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:24.125536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:24.125553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:24.125684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:24.125743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:24.146726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:24.146748Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:24.156517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:24.156568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:24.156640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:24.159028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:24.159337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:24.159503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:24.159610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:24.160807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:24.160880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:24.161204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:24.161219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:24.161250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:24.161261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:24.161268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:24.161300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.163124Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:24.186999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:24.187093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.187163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:24.187171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:24.187246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:24.187260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:24.188287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:24.188360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:24.188440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.188452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:24.188459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:24.188465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:24.189121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.189134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:24.189140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:24.189556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.189567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:24.189580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState 
leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:24.189588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:24.190347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:24.190868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:24.190938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:24.191177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:24.191203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:24.191213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:24.191275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:24.191284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:24.191318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:24.191331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:24.191791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:24.191801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
5.571666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:25.571670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:25.571723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-10-24T16:23:25.571817Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-10-24T16:23:25.572458Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-10-24T16:23:25.572551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.572620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:23:25.572713Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-10-24T16:23:25.572818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:25.573246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-10-24T16:23:25.573431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:25.573470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:25.573518Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2025-10-24T16:23:25.573708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: 
OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:23:25.573739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:25.573866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:25.573872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:25.573882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:25.573969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:25.573975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:25.573996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:25.574032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-10-24T16:23:25.574099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-10-24T16:23:25.574801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:25.574812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:25.574824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:25.574828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:25.575428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:25.575436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:25.575446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:23:25.575451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-10-24T16:23:25.575478Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:25.575490Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:25.575498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:25.575502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:25.575512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:25.576084Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-10-24T16:23:25.576145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-10-24T16:23:25.576151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-10-24T16:23:25.576207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-10-24T16:23:25.576223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:23:25.576230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:660:2611] TestWaitNotification: OK eventTxId 105 2025-10-24T16:23:25.576299Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.576328Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 36us result status StatusPathDoesNotExist 2025-10-24T16:23:25.576363Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:25.576427Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.576439Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 13us result status StatusPathDoesNotExist 2025-10-24T16:23:25.576452Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> Viewer::CommitOffsetTest >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:24.956602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:24.956624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:24.956629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:24.956634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:24.956640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:24.956643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:24.956651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:24.956668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:24.957611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:24.958495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:25.261208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:25.261227Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:25.278833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:25.278869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:25.278913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:25.285340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:25.285526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:25.285644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.285723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:25.286646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.286691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:25.286917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:25.286927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.286951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:25.286959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:25.286966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:25.286988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.288278Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 
2025-10-24T16:23:25.523136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:25.523208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.523263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:25.523272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:25.531949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:25.531983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:25.540258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.540604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:25.540660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.540670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:25.540674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:25.540679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:25.548151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.548169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:25.548176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:25.556044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.556058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.556069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:25.556076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:25.559034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:25.568056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:25.569058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:25.569286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.569322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:25.569332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:25.569391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:25.569400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:25.569432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:25.569446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:25.576194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:25.576208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
e 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-10-24T16:23:25.593207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 100:0 128 -> 240 2025-10-24T16:23:25.593213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-10-24T16:23:25.593237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:25.593245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:25.593252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-10-24T16:23:25.593832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:25.593839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:25.593872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:25.593885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.593890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-10-24T16:23:25.593895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-10-24T16:23:25.593903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.593908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-10-24T16:23:25.593919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:25.593922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:25.593927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:25.593930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:25.593936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 
100, ready parts: 1/1, is published: false 2025-10-24T16:23:25.593940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:25.593945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 100:0 2025-10-24T16:23:25.593948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 100:0 2025-10-24T16:23:25.593958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:25.593963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-10-24T16:23:25.593967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:25.593971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:25.594110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:25.594119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:25.594123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:25.594128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:25.594131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:25.594216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:25.594224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:25.594227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:25.594231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:25.594234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 2 2025-10-24T16:23:25.594240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-10-24T16:23:25.604512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:25.604540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-10-24T16:23:25.604594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:25.604600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-10-24T16:23:25.604612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:25.604615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:25.604677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:25.604701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:25.604706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:320:2309] 2025-10-24T16:23:25.604739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:25.604746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:25.604749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:320:2309] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:25.604807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:25.604845Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 51us result status StatusSuccess 2025-10-24T16:23:25.604946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:26.407881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:26.407903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:26.407908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:26.407914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:26.407919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:26.407922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:26.408160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:26.408171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:26.409093Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:26.410000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:26.564454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:26.564471Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:26.589336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:26.589366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:26.590543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:26.595107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:26.595231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:26.595327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.595391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:26.597856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:26.597898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:26.598647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:26.598660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:26.598681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:26.598688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:26.598693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:26.598714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.601896Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:26.715963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:26.716015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.716054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:26.716060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:26.718880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:26.718898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:26.720631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.720673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:26.720716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.720723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:26.720727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:26.720732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:26.721270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.721279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:26.721283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:26.722063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.722071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.722080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.722085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:26.725783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:26.726719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:26.727861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:26.728028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.728047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:26.728053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.728098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:26.728104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.728129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:26.728140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:26.729145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:26.729154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
e target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:26.751116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:26.751120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-10-24T16:23:26.751125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-10-24T16:23:26.751213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:26.751222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:26.751227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:26.751230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-10-24T16:23:26.751234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:26.751303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:26.751310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:26.751314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:26.751317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:26.751320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:26.751327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:26.752357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:26.752376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:26.752408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:26.752413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:26.752459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:26.752472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:26.752477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:340:2329] TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:26.752532Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:26.752553Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 27us result status StatusSuccess 2025-10-24T16:23:26.752658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-10-24T16:23:26.752735Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:26.752755Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 23us result status StatusSuccess 2025-10-24T16:23:26.752819Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:26.752869Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:26.752883Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 16us result status StatusSuccess 2025-10-24T16:23:26.754399Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 
ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:27.561143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:27.561167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:27.561173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:27.561180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:27.561187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:27.561192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:27.561202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:27.561600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:27.562862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:27.563720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:28.002162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:28.002180Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:28.038167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:28.038194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:28.039363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:28.044654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:28.045230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:28.045827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:28.045889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:28.047972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:28.048018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:28.049030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:28.049039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:28.049062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:28.049071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:28.049077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:28.049100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.051832Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:28.309847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:28.309922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.309979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:28.309987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:28.320786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:28.320821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:28.336235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:28.336299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:28.336365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.336378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:28.336384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:28.336391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:28.344172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.344189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:28.344195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:28.360292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.360310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.360327Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:28.360336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:28.364379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:28.380311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:28.381376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:28.381544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:28.381569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:28.381576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:28.381620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:28.381627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:28.381649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:28.381659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:28.393587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:28.393598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
pace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:28.423773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-10-24T16:23:28.423802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-10-24T16:23:28.423887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:28.423904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:28.423910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-10-24T16:23:28.423954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 100:0 128 -> 240 2025-10-24T16:23:28.423962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-10-24T16:23:28.423986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:28.423993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:28.424000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-10-24T16:23:28.425202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:28.425210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:28.425233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:28.425246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:28.425250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-10-24T16:23:28.425256Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-10-24T16:23:28.425293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.425298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-10-24T16:23:28.425310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:28.425314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:28.425319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:23:28.425322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:28.425325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-10-24T16:23:28.425331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:23:28.425335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 100:0 2025-10-24T16:23:28.425339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 100:0 2025-10-24T16:23:28.425348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:28.425354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-10-24T16:23:28.425359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:28.425362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:28.425452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:28.425461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:28.425465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:28.425469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:28.425473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:28.425575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:28.425584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:23:28.425587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:23:28.425591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:28.425594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:28.425601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-10-24T16:23:28.425606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:282:2271] 2025-10-24T16:23:28.426367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:28.426415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:23:28.426425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:28.426430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:283:2272] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-10-24T16:23:28.426511Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:28.426534Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 28us result status StatusSuccess 2025-10-24T16:23:28.426641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:26.212740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:26.212762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:26.212766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:26.212772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:26.212777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:26.212781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:26.212790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:26.212800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:26.213644Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:26.214542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:26.437381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:26.437402Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:26.508488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:26.508520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:26.509682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:26.540448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:26.544042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:26.544747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.544829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:26.564304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:26.564360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:26.565626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:26.565640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:26.565662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:26.565670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:26.565675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:26.565698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.580094Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:26.959356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:26.959411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.959461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:26.959468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:26.967215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:26.967239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:26.969303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.969348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:26.969396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.969404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:26.969408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:26.969413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:26.972693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.972704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:26.972710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:26.980058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.980070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.980081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.980088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:26.984175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:26.992117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:26.993283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:26.993474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.993505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:26.993513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.993562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:26.993569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.993595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:26.993605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:27.000220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:27.000230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
28907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:27.328994Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2025-10-24T16:23:27.329361Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-10-24T16:23:27.329395Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-10-24T16:23:27.329425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-10-24T16:23:27.329464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-10-24T16:23:27.336544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:27.336593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-10-24T16:23:27.336726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:23:27.336743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-10-24T16:23:27.337049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:27.337056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:27.337078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:27.337949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-10-24T16:23:27.337963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-10-24T16:23:27.338015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:27.338022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:27.338035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:27.338240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:27.338248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:27.338267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:27.338270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:27.339491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-10-24T16:23:27.339503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-10-24T16:23:27.339520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:27.339527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:27.339544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:23:27.339549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-10-24T16:23:27.340173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:27.340196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-10-24T16:23:27.340245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:27.340251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:23:27.340265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:27.340269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:23:27.340336Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:27.340353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:27.340357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:660:2566] 2025-10-24T16:23:27.340377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:27.340391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:27.340394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:660:2566] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:27.340458Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:27.340488Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 39us result status StatusPathDoesNotExist 2025-10-24T16:23:27.340522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:27.340563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:27.340585Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2025-10-24T16:23:27.340652Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:25.605625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:25.605648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:25.605653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:25.605659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:25.605664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:25.605668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:25.605676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:25.605688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:25.606663Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:25.607534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:25.818995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:25.819017Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:25.856309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:25.856348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:25.857389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:25.860415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:25.860576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:25.860893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:25.860966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:25.863243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.863287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:25.864296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:25.864306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:25.864327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:25.864334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:25.864339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:25.864359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:25.867156Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:26.003047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:26.003134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.003204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:26.003215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:26.005186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:26.005207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:26.007519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.007577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:26.007639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.007649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:26.007655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:26.007660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:26.008354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.008367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:26.008372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:26.009230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.009239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.009250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.009258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:26.012719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:26.013723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:26.014856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:26.015072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.015096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:26.015103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.015162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:26.015168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:26.015198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:26.015211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:26.016167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:26.016175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
944, LocalPathId: 2] was 11 2025-10-24T16:23:28.218883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-10-24T16:23:28.224362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-10-24T16:23:28.224423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-10-24T16:23:28.224479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:28.224485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:28.224538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-10-24T16:23:28.224552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:28.224557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1026:2888], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-10-24T16:23:28.224563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1026:2888], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-10-24T16:23:28.224670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.224680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-10-24T16:23:28.224721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-10-24T16:23:28.224853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-10-24T16:23:28.224863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-10-24T16:23:28.224868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: 
Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-10-24T16:23:28.224873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-10-24T16:23:28.224878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-10-24T16:23:28.225027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-10-24T16:23:28.225038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-10-24T16:23:28.225041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-10-24T16:23:28.225045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-10-24T16:23:28.225051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-10-24T16:23:28.225061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-10-24T16:23:28.236644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-10-24T16:23:28.236684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-10-24T16:23:28.236690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-10-24T16:23:28.237167Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-10-24T16:23:28.237207Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-10-24T16:23:28.237225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6287: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-10-24T16:23:28.237231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-10-24T16:23:28.237246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-10-24T16:23:28.237254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-10-24T16:23:28.237261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-10-24T16:23:28.237282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 137:0 2 -> 3 2025-10-24T16:23:28.237431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-10-24T16:23:28.238722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-10-24T16:23:28.239361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.239410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.239417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:216: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-10-24T16:23:28.239430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:236: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-10-24T16:23:28.239506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:252: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 974 RawX2: 4294970143 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 
\020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2025-10-24T16:23:28.248470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-10-24T16:23:28.248511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::Delete ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.216130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.216147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.216151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.216157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.216162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.216165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.216173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.216183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.216957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.217558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.254967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.254988Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.258264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.258294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.258337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.259454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.259588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.259690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.259762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.260479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.260521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.260715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.260722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.260745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.260754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.260760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.260779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.261760Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.390589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.390648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.390695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.390702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.399900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.399934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.402943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.402993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.403044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.403054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.403060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.403066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.403609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.403618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.403623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.408015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.408029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.408041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-10-24T16:23:23.408047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.411291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.416026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.416894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.417088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.417119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.417127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.417180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.417188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.417216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.417228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.418641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.418651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_b ... 
eply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:29.114769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:29.115263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:23:29.115271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:23:29.115306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-10-24T16:23:29.115330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:23:29.115334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:451:2404], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-10-24T16:23:29.115339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:451:2404], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-10-24T16:23:29.115387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:29.115392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1091: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-10-24T16:23:29.115405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:29.115412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-10-24T16:23:29.115416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 129 -> 240 2025-10-24T16:23:29.115554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:29.115564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:29.115568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-10-24T16:23:29.115572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-10-24T16:23:29.115577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-10-24T16:23:29.115742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:29.115751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:29.115755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-10-24T16:23:29.115758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:23:29.115762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-10-24T16:23:29.115770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-10-24T16:23:29.120288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:29.120299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-10-24T16:23:29.120358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-10-24T16:23:29.120396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:29.120400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:29.120405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:29.120410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:29.120414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-10-24T16:23:29.120424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2490] message: TxId: 104 2025-10-24T16:23:29.120430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:29.120435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:23:29.120439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:23:29.120453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-10-24T16:23:29.120913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:23:29.120921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:23:29.124524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-10-24T16:23:29.124640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-10-24T16:23:29.125176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:23:29.125185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:451:2404], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-10-24T16:23:29.125210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:23:29.125215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:743:2660] 2025-10-24T16:23:29.125349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-10-24T16:23:29.125509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:23:29.125537Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 34us result status StatusSuccess 2025-10-24T16:23:29.125618Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: 
"unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:26.418955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:26.418982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:26.418987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:26.418995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:26.419001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:26.419005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:26.419015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:26.419027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:26.420206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# 
, AvailableExternalDataSources# 2025-10-24T16:23:26.420848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:26.786723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:26.786749Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:26.820437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:26.820482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:26.821406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:26.837858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:26.838578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:26.839294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:26.839393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:26.845310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:26.845364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:26.846129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:26.846138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:26.846161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:26.846169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:26.846174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:26.846199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:26.848956Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:27.072679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:27.072755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.072818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:27.072826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:27.072897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:27.072911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:27.079062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:27.079126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:27.079186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.079198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:27.079203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:27.079209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:27.079928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.079938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:27.079943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:27.081400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.081410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.081422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:27.081430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:27.085333Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:27.088041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:27.088095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:27.088307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:27.088338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:27.088346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:27.088412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:27.088420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:27.088453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:27.088464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:27.101557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:27.101570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
atsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:27.515173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:27.515178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:27.515182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:27.515190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:27.515203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:27.515538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:27.515573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:27.516857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:27.517130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:27.517156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:27.517167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:27.517171Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:27.517283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:27.517350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1417: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1491: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1517: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1819: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-10-24T16:23:27.517452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2092: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2152: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2210: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2296: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2362: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.517737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.524956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:27.525614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-10-24T16:23:27.525627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:27.526387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:27.526398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:27.526405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:27.526521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:631:2548] sender: [1:691:2058] recipient: [1:15:2062] 2025-10-24T16:23:27.564265Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:27.564312Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 67us result status StatusPathDoesNotExist 2025-10-24T16:23:27.564347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:27.564431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:27.564459Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2025-10-24T16:23:27.564537Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:28.509742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:28.509765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:28.509770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:28.509776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:28.509781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:28.509785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:28.509793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:28.509803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:28.510975Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:28.511865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:28.761103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:28.761124Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:28.824977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:28.825008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:28.825041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:28.848946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:28.849657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:28.850351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:28.850422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:28.853289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:28.853332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:28.853973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:28.853982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:28.854001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:28.854007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:28.854012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:28.854032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:28.856562Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:29.135375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:29.135438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:29.135483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:29.135490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:29.140241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:29.140260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:29.141283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:29.141325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:29.141367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:29.141376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:29.141381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:29.141386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:29.141949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:29.141959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:29.141963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:29.142770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:29.142778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:29.142787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:29.142793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:29.145640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:29.148037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:29.148071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:29.148252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:29.148273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:29.148290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:29.148332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:29.148338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:29.148361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:29.148370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:29.149938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:29.149947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ntPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:29.335676Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-10-24T16:23:29.335845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-10-24T16:23:29.335879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:23:29.335972Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-10-24T16:23:29.335999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:29.336016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 2025-10-24T16:23:29.336186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-10-24T16:23:29.336407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:23:29.336440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:29.336733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:29.336742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:29.336764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:29.337224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:29.337234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2025-10-24T16:23:29.337245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:29.337431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-10-24T16:23:29.337439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-10-24T16:23:29.337455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:29.337459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:29.338439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:29.338449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:29.338462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-10-24T16:23:29.338465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-10-24T16:23:29.338485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6133: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-10-24T16:23:29.338497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:29.338500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:29.339145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:23:29.339159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-10-24T16:23:29.339195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:29.339208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-10-24T16:23:29.339258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:29.339264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-10-24T16:23:29.339275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:29.339278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:29.339329Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:29.339345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:29.339352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:29.339356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:619:2528] 2025-10-24T16:23:29.339363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:29.339366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:619:2528] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:29.339436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:29.339460Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 33us result status StatusPathDoesNotExist 2025-10-24T16:23:29.339507Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:29.339562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:29.339580Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2025-10-24T16:23:29.339657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TSchemeShardSubDomainTest::Restart [GOOD] >> TFileStoreWithReboots::SimultaneousCreateDropNfs [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:29.960667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:29.960696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:29.960702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:29.960709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:29.960716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:29.960721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-10-24T16:23:29.960741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:29.960755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:29.961595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:29.962322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:30.319332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:30.319363Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:30.401806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:30.401849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:30.403007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:30.412771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:30.413504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:30.414224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:30.414306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:30.417044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:30.417098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:30.417886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:30.417897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:30.417922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:30.417930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:30.417936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:30.417960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.424135Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:30.811270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:30.811360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.811435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:30.811444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:30.823376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:30.823429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:30.829026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:30.829109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:30.829184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.829197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:30.829203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:30.829209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:30.829877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.829888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:30.829893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:30.830873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:23:30.830884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.830896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:30.830903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:30.835304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:30.841507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:30.841874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:30.842106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:30.842136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:30.842144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:30.842203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:30.842211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:30.842251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:30.842264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:30.848308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:30.848319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-10-24T16:23:30.848380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:30.848387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:23:30.848453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.848462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:23:30.848476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:23:30.848481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:23:30.848486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:23:30.848491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:23:30.848496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-10-24T16:23:30.848501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:23:30.848506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:23:30.848510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:23:30.848797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:30.848806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:23:30.848811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-10-24T16:23:30.850256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:23:30.850278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:23:30.850285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:23:30.850291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:23:30.850298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:30.850315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:23:30.852531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:23:30.852638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-10-24T16:23:30.853085Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:276:2265] Bootstrap 2025-10-24T16:23:30.854414Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:276:2265] Become StateWork (SchemeCache [1:281:2270]) 2025-10-24T16:23:30.860868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:30.860940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.860972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-10-24T16:23:30.861191Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:23:30.868200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:30.868533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-10-24T16:23:30.868642Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-10-24T16:23:30.868697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:23:30.868705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-10-24T16:23:30.868760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:23:30.868776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: 
tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:23:30.868781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:291:2280] TestWaitNotification: OK eventTxId 100 2025-10-24T16:23:30.868856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:30.868889Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusPathDoesNotExist 2025-10-24T16:23:30.868930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |54.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:30.644711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:30.644738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:30.644743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:30.644750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:30.644757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:30.644761Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:30.644772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:30.645078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:30.645741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:30.646396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:31.210153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:31.210180Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:31.252527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:31.252572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:31.252626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:31.253821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:31.253966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:31.254079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:31.254166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:31.255871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:31.255925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:31.256944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:31.256954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:31.256978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:31.256986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:31.256992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:31.257016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.268049Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:31.570329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:31.570413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.570476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:31.570484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:31.584402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:31.584730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:31.592339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:31.592965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:31.593033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.593044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:31.593049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:31.593054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:31.596422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.596451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:31.596459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 
2025-10-24T16:23:31.600249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.600264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.600279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:31.600287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:31.604413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:31.612228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:31.613469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:31.613717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:31.613761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:31.613771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:31.613836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:31.613846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:31.613886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:31.613899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:31.615000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:31.615010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme ... BUG: schemeshard_impl.cpp:441: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:31.945364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1491: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1517: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-10-24T16:23:31.945484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1819: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-10-24T16:23:31.945533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2092: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2152: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2210: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:31.945582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:31.945587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:31.945602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2296: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2362: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2513: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2892: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2971: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3472: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3508: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:3741: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3886: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3903: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3920: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4080: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4096: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4381: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4726: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.945997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4787: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.946005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4846: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.946024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4935: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.946030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4962: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.946038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4989: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-10-24T16:23:31.949753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:31.951511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:31.951524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:31.951591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:31.951599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:31.951605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:31.952305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:466:2418] sender: [1:528:2058] recipient: [1:15:2062] 2025-10-24T16:23:32.028137Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:32.028217Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 99us result status StatusSuccess 2025-10-24T16:23:32.028315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:32.028405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:32.028419Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2025-10-24T16:23:32.028470Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:32.524863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:32.524886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:32.524891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:32.524897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:32.524902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:32.524906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:32.524914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:32.524925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-10-24T16:23:32.526451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:32.527368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:32.670323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:32.670343Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:32.729614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:32.729653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:32.730974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:32.742782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:32.743475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:32.744114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:32.744191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:32.746163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:32.746206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:32.747182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:32.747193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:32.747213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:32.747220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:32.747225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:32.747247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:32.751272Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:32.901728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:32.901791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:32.901844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:32.901850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:32.905433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:32.905458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:32.908549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:32.908597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:32.908648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:32.908657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:32.908663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:32.908668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:32.909334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:32.909345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:32.909351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:32.910182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:32.910193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:32.910203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:32.910209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:32.914449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:32.914805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:32.916172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:32.916373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:32.916397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:32.916404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:32.916450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:32.916457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:32.916484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:32.916494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:32.917761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:32.917771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-10-24T16:23:33.136295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:33.136332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-10-24T16:23:33.136407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:33.136424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-10-24T16:23:33.136685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:33.136694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:33.136713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:33.136877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:33.136882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:33.136891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:33.137549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:33.137644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:33.137653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:33.138694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:33.138705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:33.138716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:33.138720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:33.138737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:33.138758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:33.138810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:33.138815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:33.138870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:33.138887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:33.138892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:498:2453] TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:33.138958Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:33.138985Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 36us result status StatusPathDoesNotExist 2025-10-24T16:23:33.139021Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:33.139070Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:33.139086Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2025-10-24T16:23:33.139147Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-10-24T16:23:33.139208Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-10-24T16:23:33.139225Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-10-24T16:23:33.139233Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-10-24T16:23:33.139297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:33.139315Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status StatusSuccess 2025-10-24T16:23:33.139376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> IndexBuildTestReboots::UniqueIndexValidationFailsInsideShard [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::SimultaneousCreateDropNfs [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:22:54.303141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:22:54.303160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.303165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: 
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:22:54.303171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:22:54.303176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:22:54.303180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:22:54.303187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:22:54.303202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:22:54.303306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:22:54.303351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:22:54.418630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:22:54.418667Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:22:54.418768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:22:54.422382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:22:54.422467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:22:54.422516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:22:54.425055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:22:54.425202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:22:54.425303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.425369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:22:54.426113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.426153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:22:54.426439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:22:54.426451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:22:54.426478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:22:54.426485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:22:54.426492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:22:54.426513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:22:54.428045Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:22:54.518733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:22:54.518791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.518841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:22:54.518848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:22:54.518883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:22:54.518895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:22:54.520550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.520595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:22:54.520637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:22:54.520661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:22:54.520667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:22:54.520672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:22:54.521257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.521268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:22:54.521273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:22:54.522353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.522363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:22:54.522368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:22:54.522373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:22:54.524752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:22:54.525217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:22:54.526030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:22:54.526215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:22:54.526239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
meshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186233409546 2025-10-24T16:23:32.445573Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-10-24T16:23:32.445619Z node 81 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-10-24T16:23:32.445683Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:32.445713Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:23:32.445777Z node 81 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-10-24T16:23:32.445930Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:32.445935Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-10-24T16:23:32.445944Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409547 2025-10-24T16:23:32.446281Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:32.446327Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:32.446448Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:32.446454Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:32.446473Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:32.446543Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-10-24T16:23:32.447354Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:32.447365Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:32.447398Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:32.447402Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:32.447496Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:32.447508Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:32.447513Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:32.447527Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:32.447532Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:32.447541Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:32.447641Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:32.447889Z node 81 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1003 2025-10-24T16:23:32.447932Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-10-24T16:23:32.447938Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-10-24T16:23:32.447951Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-10-24T16:23:32.447954Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-10-24T16:23:32.448013Z node 81 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:23:32.448026Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:23:32.448030Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [81:533:2489] 2025-10-24T16:23:32.448049Z node 81 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-10-24T16:23:32.448060Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-10-24T16:23:32.448064Z node 81 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [81:533:2489] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-10-24T16:23:32.448152Z node 81 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-10-24T16:23:32.448166Z node 81 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-10-24T16:23:32.448173Z node 81 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-10-24T16:23:32.448238Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:32.448266Z node 81 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 37us result status StatusPathDoesNotExist 2025-10-24T16:23:32.448298Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:32.448340Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:32.448354Z node 81 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 15us result status StatusSuccess 2025-10-24T16:23:32.448408Z node 81 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndDelete >> ReadSessionImplTest::DataReceivedCallback [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:27.166512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:27.166536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:27.166542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:27.166549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:27.166556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:27.166568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:27.166578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:27.166592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:27.167588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:27.168507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:27.457990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:27.458017Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:27.497413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:27.497509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:27.498062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:27.568100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:27.568204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:27.568888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:27.568966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:27.580169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:27.580231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:27.581542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:27.581556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:27.581854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:27.581865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:27.581872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:27.581888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.596205Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:23:27.910725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:27.910816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.910889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:27.910898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:27.929902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:27.930218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:27.931770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:27.932359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:27.932425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.932436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:27.932441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:27.932447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:27.943627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.943651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:27.943658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:27.944858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.944868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:27.944880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-10-24T16:23:27.944888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:27.949309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:27.950853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:27.952012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:27.952218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:27.952768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:27.952775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:27.952828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:27.952835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:27.953149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:27.953161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:27.954923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:27.954931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_b ... 
ationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:32.551926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:32.556189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:23:32.556205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:23:32.556263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-10-24T16:23:32.556297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:23:32.556304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:449:2402], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-10-24T16:23:32.556310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:449:2402], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-10-24T16:23:32.556323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:32.556332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1091: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-10-24T16:23:32.556349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:32.556354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-10-24T16:23:32.556361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 129 -> 240 2025-10-24T16:23:32.558031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:32.558051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:32.558056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-10-24T16:23:32.558062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-10-24T16:23:32.558069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72075186233409546, LocalPathId: 1] was 5 2025-10-24T16:23:32.564234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:32.564254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:32.564259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-10-24T16:23:32.564264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:23:32.564270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-10-24T16:23:32.564288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-10-24T16:23:32.565125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:32.565138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-10-24T16:23:32.565214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-10-24T16:23:32.565268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:32.565273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:32.565279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:32.565283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:32.565288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-10-24T16:23:32.565301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:554:2494] message: TxId: 104 2025-10-24T16:23:32.565307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:32.565312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:23:32.565317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:23:32.565333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-10-24T16:23:32.565462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:23:32.565470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:23:32.565645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-10-24T16:23:32.567125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-10-24T16:23:32.567696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:23:32.567708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:449:2402], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-10-24T16:23:32.567724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:23:32.567730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:818:2736] 2025-10-24T16:23:32.567895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-10-24T16:23:32.568153Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:23:32.568183Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 38us result status StatusSuccess 2025-10-24T16:23:32.568279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 
ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> BasicUsage::BrokenCredentialsProvider [GOOD] >> KqpBatchDelete::MultiStatement >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> KqpBatchUpdate::Returning >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:23.508773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:23.508796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.508801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:23.508808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:23.508814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:23.508819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:23.508828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:23.508839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:23.509653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:23.510349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:23.606469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:23.606487Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.624183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:23.624206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:23.624250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:23.626349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:23.626476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:23.626593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.626668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:23.627356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.627388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:23.627546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.627555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:23.627577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:23.627584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:23.627590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:23.627609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.628595Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:23.689450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:23.689516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.689573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:23.689579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:23.691804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:23.691823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:23.693067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.693111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:23.693157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.693166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:23.693171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:23.693176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:23.693779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.693789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:23.693793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:23.694504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.694514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:23.694523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.694536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:23.697898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:23.698279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:23.699166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:23.699334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:23.699355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:23.699362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.699416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:23.699422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:23.699450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:23.699461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:23.700266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:23.700273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_b ... 
nId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:23:36.284822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-10-24T16:23:36.284852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:451:2404], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-10-24T16:23:36.284862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:451:2404], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-10-24T16:23:36.284872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1091: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-10-24T16:23:36.284893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-10-24T16:23:36.284903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 129 -> 240 2025-10-24T16:23:36.285168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:36.285179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:36.285186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-10-24T16:23:36.285192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-10-24T16:23:36.285197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72075186233409546, LocalPathId: 1] was 5 2025-10-24T16:23:36.285328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:36.285337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-10-24T16:23:36.285340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-10-24T16:23:36.285344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:23:36.285348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-10-24T16:23:36.285356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-10-24T16:23:36.285828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-10-24T16:23:36.285837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-10-24T16:23:36.285908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-10-24T16:23:36.285954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:36.285959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:36.285963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:23:36.285966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:36.285971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-10-24T16:23:36.285982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:550:2490] message: TxId: 104 2025-10-24T16:23:36.285987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:23:36.285992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:23:36.285999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:23:36.286014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-10-24T16:23:36.286177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-10-24T16:23:36.286183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-10-24T16:23:36.286252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-10-24T16:23:36.292010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-10-24T16:23:36.292319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-10-24T16:23:36.292328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:451:2404], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-10-24T16:23:36.292718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:23:36.292725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1423:3332] 2025-10-24T16:23:36.292814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-10-24T16:23:36.293425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:23:36.293460Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 44us result status StatusSuccess 2025-10-24T16:23:36.296411Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 
ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-10-24T16:23:06.815329Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.815337Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.815341Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.815451Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.827932Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.828032Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.828196Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.828370Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.829276Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.829313Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.829326Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-10-24T16:23:06.829564Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.829569Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.829573Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.832999Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-10-24T16:23:06.847920Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.848006Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.848101Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.848238Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.849526Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.849569Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.849592Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-10-24T16:23:06.849970Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.849976Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.849980Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.856357Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.857501Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.857669Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.859930Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.860254Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.861176Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.863893Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.863915Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-10-24T16:23:06.867439Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.867446Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.867449Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.877708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.884616Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.884699Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.884826Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.885638Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.887960Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.889835Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-10-24T16:23:06.889853Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-10-24T16:23:06.896230Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.896236Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.896241Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.898982Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.899208Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.899250Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.899890Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.899970Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.905991Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.906331Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.906349Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-10-24T16:23:06.906612Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.906617Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.906620Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.907976Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.919950Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.920023Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.920122Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.920223Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.920371Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.923926Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.923948Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-10-24T16:23:06.932275Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.932282Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.932286Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.932362Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-10-24T16:23:06.932547Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.932570Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.933239Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.935809Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.939914Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.939961Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.939974Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-10-24T16:23:06.940310Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.940314Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.940317Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:06.940387Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:06.943949Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:06.944087Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.947927Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:06.948215Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:06.948280Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:06.948297Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:06.948305Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-10-24T16:23:07.031920Z :ReadSession INFO: Random seed for debugging is 1761322987031911 2025-10-24T16:23:07.319609Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824629208765691:2077];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:07.319641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:07.325351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:07.325493Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:07.329 ... 
: 1 2025-10-24T16:23:32.755969Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:32.856519Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:32.856532Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:32.856535Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:32.959911Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:32.959923Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:32.959926Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.060173Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.060189Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.060192Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.163973Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.163987Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.163990Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.268088Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.268103Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.268106Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.368345Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.368358Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.368361Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.472186Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.472204Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.472207Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.583901Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: 
[72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.583918Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.583921Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.628219Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1277: session cookie 1 consumer shared/user session shared/user_1_1_8120424860974178374_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2025-10-24T16:23:33.684172Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.684186Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.684188Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.719893Z :INFO: [/Root] [/Root] [190b1191-e02a54d5-7e0ae0c9-2680bb50] Closing read session. Close timeout: 0.000000s 2025-10-24T16:23:33.719919Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-10-24T16:23:33.719931Z :INFO: [/Root] [/Root] [190b1191-e02a54d5-7e0ae0c9-2680bb50] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16472 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:23:33.719955Z :NOTICE: [/Root] [/Root] [190b1191-e02a54d5-7e0ae0c9-2680bb50] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-10-24T16:23:33.719967Z :DEBUG: [/Root] [/Root] [190b1191-e02a54d5-7e0ae0c9-2680bb50] [dc1] Abort session to cluster 2025-10-24T16:23:33.720175Z :NOTICE: [/Root] [/Root] [190b1191-e02a54d5-7e0ae0c9-2680bb50] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:23:33.721155Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_8120424860974178374_v1 grpc read done: success# 0, data# { } 2025-10-24T16:23:33.721169Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_8120424860974178374_v1 grpc read failed 2025-10-24T16:23:33.721176Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_8120424860974178374_v1 grpc closed 2025-10-24T16:23:33.721191Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_8120424860974178374_v1 is DEAD 2025-10-24T16:23:33.721876Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_8120424860974178374_v1 2025-10-24T16:23:33.721885Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [1:7564824672158441050:2520] destroyed 2025-10-24T16:23:33.722236Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_8120424860974178374_v1 2025-10-24T16:23:33.722460Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [1:7564824672158441048:2517] disconnected. 2025-10-24T16:23:33.722476Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [1:7564824672158441048:2517] disconnected; active server actors: 1 2025-10-24T16:23:33.722479Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [1:7564824672158441048:2517] client user disconnected session shared/user_1_1_8120424860974178374_v1 2025-10-24T16:23:33.794501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.794515Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.794518Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:33.896160Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.896178Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.896181Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:34.000051Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:34.000065Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:34.000067Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:34.120030Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:34.120052Z node 1 
:PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:34.120055Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:34.224331Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:34.224347Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:34.224350Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:34.799679Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:34.799687Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:34.799690Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:34.804113Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:34.815912Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:34.816001Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:34.816123Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:34.816300Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:34.819268Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:34.820591Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-10-24T16:23:34.820613Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:34.820628Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:34.820636Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-10-24T16:23:34.824153Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-10-24T16:23:34.824169Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> KqpBatchUpdate::ColumnTable >> KqpBatchDelete::SimpleOnePartition >> KqpBatchUpdate::HasTxControl |54.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> KqpBatchUpdate::UpdateOn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:36.281534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:36.281558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:36.281563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:36.281569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:36.281576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:36.281579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:36.281589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:36.281602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:36.282293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:36.283318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:36.493271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:36.493299Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:36.563028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:36.563062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:36.564006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:36.588311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:36.597940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:36.598291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:36.598618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:36.606162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:36.606219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:36.607348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:36.607357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:36.607377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:36.607384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:36.607389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:36.607407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.617686Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:36.923529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:36.923596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.923648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:36.923654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:36.923717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:36.923730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:36.929778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:36.929838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:36.929897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.929908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:36.929913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:36.929918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:36.935210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.935236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:36.935243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:36.937242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.937262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.937275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:36.937283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:36.940237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:36.941439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:36.941503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:36.941726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:36.941760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:36.941769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:36.941830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:36.941838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:36.941874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:36.941887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:36.943769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:36.943783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1435 } } CommitVersion { Step: 140 TxId: 101 } 2025-10-24T16:23:37.301871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 621 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-10-24T16:23:37.301878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-10-24T16:23:37.301889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 621 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-10-24T16:23:37.301895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:23:37.301904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 621 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-10-24T16:23:37.301915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.301919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.301923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-10-24T16:23:37.301928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 129 -> 240 2025-10-24T16:23:37.307177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:37.307200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:37.307212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.307231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.307248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.307255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:23:37.307267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:37.307272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:37.307277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:37.307280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:37.307284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-10-24T16:23:37.307298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:282:2271] message: TxId: 101 2025-10-24T16:23:37.307304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:37.307308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:37.307312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:37.307334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:37.308214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:37.308225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:283:2272] TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:37.308311Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:37.308349Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 46us result status StatusSuccess 2025-10-24T16:23:37.308448Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: 
EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:37.308531Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:37.308550Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 20us result status StatusSuccess 2025-10-24T16:23:37.308621Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:37.338033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:37.338051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:37.338056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:37.338061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:37.338066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:37.338069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:37.338076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:37.338085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:37.338933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-10-24T16:23:37.338976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:37.498311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:37.498333Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:37.504301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:37.504372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:37.504417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:37.506947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:37.507011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:37.507119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.507187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:37.507711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.507753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:37.507999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.508010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.508029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:37.508037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:37.508043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:37.508055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.510105Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:23:37.631601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:37.631671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.631716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:37.631723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:37.631787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:37.631796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:37.632339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.632377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:37.632420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.632427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:37.632431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:37.632435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:37.632719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.632726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:37.632730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:37.632965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.632972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.632980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.632986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:37.633504Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:37.635056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:37.635087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:37.635244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.635263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:37.635269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.635916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:37.635923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.636234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:37.636245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:37.637092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.637099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
create, do next state 2025-10-24T16:23:37.747201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 108:0 2 -> 3 2025-10-24T16:23:37.748093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.748104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:37.748110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 108:0 3 -> 128 2025-10-24T16:23:37.748663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.748673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.748679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-10-24T16:23:37.748685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-10-24T16:23:37.748709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:37.749510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-10-24T16:23:37.749535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-10-24T16:23:37.749604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.749622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:37.749632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-10-24T16:23:37.749688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 108:0 128 -> 240 2025-10-24T16:23:37.749695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-10-24T16:23:37.749719Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:37.749730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2025-10-24T16:23:37.750520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.750528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:37.750563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.750568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2212], at schemeshard: 72057594046678944, txId: 108, path id: 2 2025-10-24T16:23:37.750579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.750597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2025-10-24T16:23:37.750609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#108:0 progress is 1/1 2025-10-24T16:23:37.750614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:23:37.750620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#108:0 progress is 1/1 2025-10-24T16:23:37.750624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:23:37.750629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2025-10-24T16:23:37.750634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-10-24T16:23:37.750640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 108:0 2025-10-24T16:23:37.750644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 108:0 2025-10-24T16:23:37.750654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:23:37.750671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-10-24T16:23:37.750678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-10-24T16:23:37.750828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:23:37.750840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-10-24T16:23:37.750845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-10-24T16:23:37.750850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-10-24T16:23:37.750856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:37.750868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-10-24T16:23:37.752079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-10-24T16:23:37.752145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-10-24T16:23:37.752152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-10-24T16:23:37.752229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-10-24T16:23:37.752244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-10-24T16:23:37.752249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:595:2550] TestWaitNotification: OK eventTxId 108 2025-10-24T16:23:37.752356Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:37.752385Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 37us result status StatusSuccess 2025-10-24T16:23:37.752472Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> KqpBatchDelete::DeleteOn >> KqpBatchDelete::Returning >> KqpBatchDelete::ManyPartitions_2 >> KqpBatchDelete::Large_2 >> KqpBatchUpdate::ManyPartitions_2 >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-10-24T16:23:11.948326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:11.948593Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824647480986951:2252];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:11.948634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:12.141223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:12.141245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:12.143964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:12.148611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10116, node 1 2025-10-24T16:23:12.178617Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:12.204100Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: 
Notification cookie mismatch for subscription [1:7564824647480986734:2081] 1761322991909422 != 1761322991909425 2025-10-24T16:23:12.271379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:12.271392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:12.271394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:12.271441Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:12.334936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:12.339537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:12.392331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:12.438357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:12.439099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-10-24T16:23:12.441443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-10-24T16:23:12.441754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:12.442410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:12.915933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:13.093865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824656070922057:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:13.093899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:13.094041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824656070922069:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:13.094055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824656070922070:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:13.094128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:13.095003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:13.097166Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824656070922073:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-10-24T16:23:13.185215Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824656070922124:2372] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:13.623934Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:13.623981Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:13.648636Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:13.648663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:13.650754Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:13.653806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:13.655962Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564824655804683228:2081] 1761322993583942 != 1761322993583945 TServer::EnableGrpc on GrpcPort 17202, node 2 2025-10-24T16:23:13.709507Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:13.709520Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:13.709521Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:13.709578Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathI ... 
agic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:23:20.770249Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 13512, node 5 TClient is connected to server localhost:22896 2025-10-24T16:23:20.866006Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:20.866027Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:20.866032Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:20.866101Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"10000000010000110111111100100111","FieldsRequired":"00000000000000000000000000100101","Problems":["no-database-board-info"],"Nodes":[{"NodeId":6,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[123.90380859375,51.82421875,19.8740234375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-aakgzge3ui.auto.internal","Version":".8485dfb","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-10-24T16:23:22.853594Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:22.854758Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:22.855141Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:538:2417], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:22.855191Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:22.855208Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:23.048607Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:23.086762Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:23.086806Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:23.112996Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:23.241637Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:23:23.283959Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:23:23.684906Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 21494, node 7 TClient is connected to server localhost:10048 2025-10-24T16:23:23.955165Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:23.955185Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:23.955190Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:23.955301Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"10000000010000110111111100000111","FieldsRequired":"00000000000000000000000000000101","Nodes":[{"NodeId":9,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[123.90380859375,51.82421875,19.8740234375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-aakgzge3ui.auto.internal","Version":".8485dfb","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-10-24T16:23:26.459131Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:26.464465Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:26.464873Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:541:2419], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:26.464951Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:26.464960Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:26.798448Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:26.852485Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:26.852518Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:26.892481Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:27.304833Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:23:27.336138Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:23:28.201616Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 19701, node 10 TClient is connected to server localhost:17484 2025-10-24T16:23:28.532555Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:28.532572Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:28.532576Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:28.532663Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"10000000010000110111111100000111","FieldsRequired":"00000000000000000000000000000101","Nodes":[{"NodeId":11,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[126.15380859375,53.4873046875,20.583984375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-aakgzge3ui.auto.internal","Version":".8485dfb","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-10-24T16:23:31.494554Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:31.495798Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:31.496260Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:625:2420], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:31.496312Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:31.496340Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:31.824395Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:31.880914Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:31.880946Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:31.928919Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:32.370970Z node 13 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:23:32.420069Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:23:33.271185Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 27792, node 13 TClient is connected to server localhost:16731 2025-10-24T16:23:33.728127Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:33.728147Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:33.728151Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:33.729475Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"2","FoundNodes":"2","FieldsAvailable":"10000000010100110111111100100111","FieldsRequired":"00000000000000000000000000100101","Nodes":[{"NodeId":15,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[134.3095703125,56.384765625,21.6982421875],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-aakgzge3ui.auto.internal","Version":".8485dfb","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}},{"NodeId":16,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[134.3095703125,56.384765625,21.6982421875],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-aakgzge3ui.auto.internal","Version":".8485dfb","Location":{"DataCenter":"4","Module":"4","Rack":"4","Unit":"4"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64},"Tablets":[{"Type":"DataShard","State":"Green","Count":1}]}]} |54.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-10-24T16:23:07.128278Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1761322987128270 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001395/r3tmp/tmpfAsSnd/pdisk_1.dat 2025-10-24T16:23:07.592862Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:07.592545Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:07.674292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:07.674356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:07.676586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:07.676608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:07.682257Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:07.684566Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:07.685048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:07.686027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:07.686049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:07.692330Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:07.696272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19731, node 1 2025-10-24T16:23:07.812114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001395/r3tmp/yandexNQSUl4.tmp 2025-10-24T16:23:07.812130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001395/r3tmp/yandexNQSUl4.tmp 2025-10-24T16:23:07.812202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001395/r3tmp/yandexNQSUl4.tmp 2025-10-24T16:23:07.812256Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:07.819633Z INFO: TTestServer started on Port 25727 GrpcPort 19731 TClient is connected to server localhost:25727 PQClient connected to localhost:19731 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:23:07.953928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:08.025781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:23:08.157359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-10-24T16:23:08.572288Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:08.591903Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:09.537884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824636846500557:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.537908Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824636846500570:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.537916Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.538489Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824636846500574:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.538499Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.539765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:09.560847Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824636846500573:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:23:09.655840Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824636846500603:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:09.659978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:09.664480Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824635686695753:2331], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:23:09.664549Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZTE0ODg3NDctMmRmNDc3ZC1lY2EwYTMxYS1kYjg3NWZh, ActorId: [1:7564824635686695727:2324], ActorState: ExecuteState, TraceId: 01k8bgfy1457pjnvj50p3e6k4c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:23:09.665021Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:23:09.667093Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824636846500610:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:23:09.667681Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=OTg3ZDBlYy1mNTRmMTJmNC03NzdmYTljZC05NDVmMTVkYw==, ActorId: [2:7564824636846500555:2296], ActorState: ExecuteState, TraceId: 01k8bgfxz03559myx96xb21jsc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:23:09.667799Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:23:09.709902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2814749767 ... __balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [5:7564824749542626969:2450] disconnected. 2025-10-24T16:23:35.724029Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [5:7564824749542626969:2450] disconnected; active server actors: 1 2025-10-24T16:23:35.724032Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1695: [72075186224037893][rt3.dc1--test-topic] pipe [5:7564824749542626969:2450] disconnected no session 2025-10-24T16:23:35.769230Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72075186224037892] server connected, pipe [5:7564824749542626992:2450], now have 1 active actors on pipe 2025-10-24T16:23:35.769482Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:23:35.769491Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:23:35.769513Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|744fad26-889546dd-f215cd6d-74835a82_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-10-24T16:23:35.769520Z node 6 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:35.769525Z node 6 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:35.769527Z node 6 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:23:35.769539Z node 6 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:35.769551Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:35: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-10-24T16:23:35.769565Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-10-24T16:23:35.769816Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:23:35.769820Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:23:35.769830Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-10-24T16:23:35.770285Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1761323015770 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:23:35.770319Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|744fad26-889546dd-f215cd6d-74835a82_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-10-24T16:23:35.768542Z node 5 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [5:7564824749542626931:2450] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-10-24T16:23:35.768558Z node 5 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [5:7564824749542626931:2450] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-10-24T16:23:35.768563Z node 5 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [5:7564824749542626931:2450] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-10-24T16:23:35.768573Z node 5 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-10-24T16:23:35.769353Z node 5 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-10-24T16:23:35.769946Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|744fad26-889546dd-f215cd6d-74835a82_0 2025-10-24T16:23:35.773251Z :INFO: [] MessageGroupId [src] SessionId [src|744fad26-889546dd-f215cd6d-74835a82_0] Write session: close. Timeout = 0 ms 2025-10-24T16:23:35.773258Z :INFO: [] MessageGroupId [src] SessionId [src|744fad26-889546dd-f215cd6d-74835a82_0] Write session will now close 2025-10-24T16:23:35.773264Z :DEBUG: [] MessageGroupId [src] SessionId [src|744fad26-889546dd-f215cd6d-74835a82_0] Write session: aborting 2025-10-24T16:23:35.773396Z :INFO: [] MessageGroupId [src] SessionId [src|744fad26-889546dd-f215cd6d-74835a82_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:23:35.773401Z :DEBUG: [] MessageGroupId [src] SessionId [src|744fad26-889546dd-f215cd6d-74835a82_0] Write session: destroy 2025-10-24T16:23:35.774288Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [5:7564824749542626992:2450] destroyed 2025-10-24T16:23:35.774300Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:23:35.774305Z node 6 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:35.774307Z node 6 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:35.774308Z node 6 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:35.773907Z node 5 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|744fad26-889546dd-f215cd6d-74835a82_0 grpc read done: success: 0 data: 2025-10-24T16:23:35.773915Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|744fad26-889546dd-f215cd6d-74835a82_0 grpc read failed 2025-10-24T16:23:35.773920Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|744fad26-889546dd-f215cd6d-74835a82_0 grpc closed 2025-10-24T16:23:35.773924Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|744fad26-889546dd-f215cd6d-74835a82_0 is DEAD 2025-10-24T16:23:35.774083Z node 5 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:23:35.796115Z :INFO: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] Starting read session 2025-10-24T16:23:35.796133Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] Starting session to cluster null (localhost:15646) 2025-10-24T16:23:35.798957Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:35.798962Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:35.798966Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] Reconnecting session to cluster null in 0.000000s 2025-10-24T16:23:35.799023Z :ERROR: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-10-24T16:23:35.799028Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:35.799031Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:35.799043Z :INFO: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-10-24T16:23:35.799078Z :NOTICE: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:23:35.799082Z :DEBUG: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-10-24T16:23:35.799091Z :INFO: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] Closing read session. Close timeout: 0.000000s 2025-10-24T16:23:35.799096Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:23:35.799101Z :INFO: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:23:35.799106Z :NOTICE: [/Root] [/Root] [2f85a522-f171a9-f40aadfa-80f7f644] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:23:35.800087Z node 6 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:35.800097Z node 6 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:35.800099Z node 6 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:35.904117Z node 6 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:35.904129Z node 6 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:35.904131Z node 6 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:36.007977Z node 6 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:36.007987Z node 6 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:36.007990Z node 6 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:36.107979Z node 6 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:36.107989Z node 6 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:36.107992Z node 6 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:37.482576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:37.482597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:37.482602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:37.482608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:37.482613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:37.482617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-10-24T16:23:37.482625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:37.482636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:37.483716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:37.484542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:37.580862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:37.580883Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:37.604832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:37.604860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:37.605933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:37.612137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:37.612848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:37.613244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.613611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:37.615999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.616039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:37.616722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.616734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.616757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:37.616765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:37.616771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:37.616792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.619783Z node 1 :HIVE INFO: 
tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:37.806258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:37.806325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.806376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:37.806385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:37.806456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:37.806470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:37.812349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.812414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:37.812484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.812498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:37.812503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:37.812509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:37.815130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.815147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:37.815153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:37.824293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.824314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.824326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.824334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:37.832407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:37.840660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:37.840707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:37.840902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.840933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:37.840942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.840996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:37.841003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.841033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:37.841044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:37.847177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.847191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
nerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:37.888825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5622: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:37.888849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 101:0 128 -> 130 2025-10-24T16:23:37.888870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:37.888878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:37.889672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:37.890181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-10-24T16:23:37.890293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.890300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:37.890331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:37.890354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.890360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-10-24T16:23:37.890366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:23:37.890436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.890443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-10-24T16:23:37.890451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:37.890455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:37.890461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:37.890465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:37.890469Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:37.890474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:37.890479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:37.890483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:37.890494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:37.890500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:37.890504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:23:37.890507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-10-24T16:23:37.890596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:37.890607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:37.890612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:37.890619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:23:37.890624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:37.890757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:37.890769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:37.890773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:37.890778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 
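Editor's note: the ack-handling records above ("Publication in-flight, count: 2" dropping to 1, then "Publication complete, notify & remove") follow a simple bookkeeping pattern: each path published for a transaction adds one expected ack, and subscribers are notified once every ack has arrived. A minimal sketch of that pattern, using the txId and path versions from this log; the class and method names are illustrative assumptions, not the schemeshard's actual code:

from collections import defaultdict

class PublicationTracker:
    # Illustrative bookkeeping only; loosely mirrors the
    # "Publication in-flight, count: N" / "Publication complete" records.
    def __init__(self):
        self.in_flight = defaultdict(set)     # tx_id -> {(path_id, version), ...}
        self.subscribers = defaultdict(list)  # tx_id -> list of callbacks

    def publish(self, tx_id, path_id, version):
        self.in_flight[tx_id].add((path_id, version))

    def ack(self, tx_id, path_id, version):
        self.in_flight[tx_id].discard((path_id, version))
        if not self.in_flight[tx_id]:
            for notify in self.subscribers.pop(tx_id, []):
                notify(tx_id)
            del self.in_flight[tx_id]

tracker = PublicationTracker()
tracker.publish(101, path_id=1, version=7)
tracker.publish(101, path_id=2, version=18446744073709551615)
tracker.ack(101, 1, 7)                      # in-flight count drops 2 -> 1
tracker.ack(101, 2, 18446744073709551615)   # count reaches 0 -> notify & remove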
2025-10-24T16:23:37.890782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:37.890794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:37.891141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:37.891150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:37.891168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:37.891235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:37.891242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:37.891251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:37.891797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:37.895146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:37.895170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:37.895181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:37.895228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:37.895235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:37.895303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:37.895320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:37.895325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:343:2332] TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:37.895395Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:37.895423Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 39us result status StatusPathDoesNotExist 2025-10-24T16:23:37.895462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:36.980169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:36.980191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:36.980197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:36.980204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:36.980210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:36.980215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:36.980224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:36.980238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:36.980364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:36.980415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:37.051260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:37.051283Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:37.074053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:37.074089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:37.075268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:37.079369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:37.079792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:37.080166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.080239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:37.082676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.082725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:37.083670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.083684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:37.083707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:37.083714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:37.083720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:37.083739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.086410Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:37.199295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:37.199364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.199418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:37.199425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:37.199486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:37.199499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:37.200164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.200212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:37.200266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.200276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:37.200281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:37.200286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:37.200641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.200652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:37.200657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:37.200924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.200932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:37.200943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-10-24T16:23:37.200949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:37.201579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:37.201976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:37.202018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:37.202194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:37.202216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:37.202222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.202266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:37.202271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:37.202296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:37.202305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:37.202668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:37.202677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
G: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:8 2025-10-24T16:23:37.980760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-10-24T16:23:37.980770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:12 2025-10-24T16:23:37.980774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-10-24T16:23:37.981726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-10-24T16:23:37.981738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-10-24T16:23:37.981750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:16 2025-10-24T16:23:37.981755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-10-24T16:23:37.981781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:25 2025-10-24T16:23:37.981785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:25 tabletId 72075186233409570 2025-10-24T16:23:37.981872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:20 2025-10-24T16:23:37.981877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-10-24T16:23:37.982087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:29 2025-10-24T16:23:37.982093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-10-24T16:23:37.982116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:33 2025-10-24T16:23:37.982120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-10-24T16:23:37.982435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:37.982440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:37.982448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:37 2025-10-24T16:23:37.982453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-10-24T16:23:37.982714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:42 2025-10-24T16:23:37.982721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 
2025-10-24T16:23:37.982791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-10-24T16:23:37.982797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-10-24T16:23:37.982975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:11 2025-10-24T16:23:37.982981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-10-24T16:23:37.982991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2025-10-24T16:23:37.982995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-10-24T16:23:37.983007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:19 2025-10-24T16:23:37.983011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-10-24T16:23:37.983028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2025-10-24T16:23:37.983031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-10-24T16:23:37.983044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2025-10-24T16:23:37.983047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-10-24T16:23:37.983059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:28 2025-10-24T16:23:37.983062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-10-24T16:23:37.983171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:32 2025-10-24T16:23:37.983177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-10-24T16:23:37.983192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:37.983196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:37.983207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:36 2025-10-24T16:23:37.983213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-10-24T16:23:37.983255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:37.983276Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:37.983285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:37.983291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:37.983308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:37.983967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:23:37.984032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:37.984039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:23:37.984108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:37.984127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:37.984131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2062:3665] TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:37.984208Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:37.984244Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 51us result status StatusPathDoesNotExist 2025-10-24T16:23:37.984285Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:37.984357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:37.984371Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 16us result status StatusPathDoesNotExist 2025-10-24T16:23:37.984387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::MergeIntersectingBoxes >> BsControllerConfig::OverlayMap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:38.022124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:38.022148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:38.022153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:38.022158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:38.022164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:38.022168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:38.022177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:38.022486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:38.023449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:38.024543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:38.055911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:38.055935Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.064452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:38.064498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:38.064533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:38.067659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:38.067818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:38.067943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.068013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:38.068901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:38.068941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:38.069179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:38.069191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:38.069215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:38.069223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:38.069230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:38.069253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.070397Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:38.088644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:38.088696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.088734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:38.088739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:38.088790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:38.088800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:38.089305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.089351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:38.089401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.089410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:38.089415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:38.089421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:38.089788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.089799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:38.089804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:38.090177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.090190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.090202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.090209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:38.090649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:38.091056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:38.091083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:38.091254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.091271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:38.091276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.091314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:38.091319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.091341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:38.091352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:38.091748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:38.091756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
t, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:38.217216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:38.217221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:23:38.217231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:23:38.217370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 502 RawX2: 4294969750 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-10-24T16:23:38.217377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-10-24T16:23:38.217390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 502 RawX2: 4294969750 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-10-24T16:23:38.217496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:23:38.217508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 502 RawX2: 4294969750 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-10-24T16:23:38.217524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.217528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.217533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-10-24T16:23:38.217539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:23:38.222752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:38.223358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.223400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:38.223416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.223501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.223512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:23:38.223530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:38.223535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:38.223557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:23:38.223562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:38.223567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:23:38.223589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:313:2302] message: TxId: 102 2025-10-24T16:23:38.223597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:23:38.223603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:23:38.223608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:23:38.223636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:38.224208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:38.224224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:530:2474] TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:38.224349Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.224403Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 63us result status StatusSuccess 2025-10-24T16:23:38.224540Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } 
Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:38.224657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.224689Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 34us result status StatusSuccess 2025-10-24T16:23:38.224909Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::ExtendByCreatingSeparateBox >> BsControllerConfig::OverlayMap [GOOD] >> KqpBatchUpdate::UnknownColumn >> BsControllerConfig::SelectAllGroups ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:38.061653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:38.061674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:38.061679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:38.061684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:38.061690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:38.061694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:38.061703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:38.061714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:38.061809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:38.061865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:38.076692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:38.076710Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.079802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:38.079833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:38.079901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:38.081195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:38.081365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:38.081482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.081562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:38.082345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:38.082388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:38.082594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:38.082602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:38.082624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:38.082633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:38.082638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:38.082657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.083835Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:38.105782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:38.105842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.105888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:38.105896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:38.105971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:38.105986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:38.108205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.108260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:38.108318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.108328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:38.108334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:38.108340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:38.108876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.108891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:38.108900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:38.109313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.109325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.109338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-10-24T16:23:38.109346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:38.110123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:38.110592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:38.110650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:38.110864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.110891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:38.110900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.110953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:38.110960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.110989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:38.111003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:38.111439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:38.111448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
UG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:23:38.125525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.125533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-10-24T16:23:38.125544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:38.125548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:38.125554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:38.125558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:38.125563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:38.125567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:38.125572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:38.125577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:38.125589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:38.125594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:38.125599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:23:38.125603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-10-24T16:23:38.125715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:38.125728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:38.125733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:38.125740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:23:38.125746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:38.125875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:38.125887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:38.125892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:38.125897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:23:38.125917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:38.125931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:38.126163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:38.126174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:38.126194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:38.126234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:38.126240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:38.126249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:38.126652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:38.127116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:38.127140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:38.127152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:38.127203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:38.127211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:38.127279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:38.127296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:38.127301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:346:2335] TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:38.127373Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.127401Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 36us result status StatusPathDoesNotExist 2025-10-24T16:23:38.127452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:38.127525Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.127545Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 23us result status StatusSuccess 2025-10-24T16:23:38.127641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:37.991627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:37.991648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:37.991654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:37.991660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:37.991666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:37.991670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:37.991680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:37.991691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-10-24T16:23:37.991789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:37.991840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:38.036748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:38.036768Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.040060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:38.040090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:38.040135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:38.041406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:38.041556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:38.041663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.041734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:38.042749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:38.042796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:38.043020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:38.043031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:38.043088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:38.043097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:38.043103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:38.043126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.044361Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:38.066133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:38.066195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.066245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:38.066253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:38.066332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:38.066348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:38.066883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.066927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:38.066978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.066986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:38.066992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:38.066996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:38.067342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.067353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:38.067358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:38.067643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.067655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:38.067666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.067672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:38.068324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:38.068715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:38.068759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:38.068952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:38.068976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:38.068983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.069031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:38.069037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:38.069061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:38.069072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:38.069461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:38.069470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
-10-24T16:23:38.167880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:23:38.167918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:38.168322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:38.168334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:38.168360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:38.168589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-10-24T16:23:38.168599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-10-24T16:23:38.168614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-10-24T16:23:38.168618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-10-24T16:23:38.168646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:38.168662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6133: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-10-24T16:23:38.168684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:38.168689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:38.168701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:38.169439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:23:38.169451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:38.169470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:38.169474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-10-24T16:23:38.169486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-10-24T16:23:38.169491Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-10-24T16:23:38.169498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:38.169501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:38.169512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-10-24T16:23:38.169518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-10-24T16:23:38.169541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:38.169840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-10-24T16:23:38.169895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:38.169903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-10-24T16:23:38.169916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:38.169920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:38.169985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:38.170012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:713:2607] 2025-10-24T16:23:38.170031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:38.170051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:713:2607] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:38.170130Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170168Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/USER_0" took 51us result status StatusPathDoesNotExist 2025-10-24T16:23:38.170212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170273Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 15us result status StatusPathDoesNotExist 2025-10-24T16:23:38.170288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170325Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:38.170349Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 25us result status StatusSuccess 2025-10-24T16:23:38.170428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |54.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> KqpBatchUpdate::ColumnTable [GOOD] >> KqpBatchDelete::Returning [GOOD] >> KqpBatchDelete::ManyPartitions_3 |54.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> BsControllerConfig::PDiskCreate >> KqpBatchDelete::DeleteOn [GOOD] |54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |54.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> KqpBatchUpdate::Large_1 >> KqpBatchDelete::SimplePartitions |54.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |54.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |54.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo >> KqpBatchDelete::HasTxControl >> BsControllerConfig::Basic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 32764, MsgBus: 10972 2025-10-24T16:23:37.987341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824759220065564:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:37.987384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:37.991970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/0015a4/r3tmp/tmp49Pwx0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32764, node 1 2025-10-24T16:23:38.082369Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.092054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.092066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.092068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.092103Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:38.101817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.101836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.102622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10972 2025-10-24T16:23:38.126251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.223175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.233215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:38.507578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824763515033407:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.507601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824763515033398:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.507655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.508075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824763515033413:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.508098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.508441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:38.511018Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824763515033412:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:38.601210Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824763515033465:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:38.666193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:23:38.760381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:23:38.760421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:23:38.760448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:23:38.760460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:23:38.760475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:23:38.760487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:23:38.760501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:23:38.760514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:23:38.760527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:23:38.760540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:23:38.760553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:23:38.760567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:23:38.760581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;self_id=[1:7564824763515034063:2342];tablet_id=72075186224037947;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:23:38.760937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824763515033825:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:23:38.760951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824763515033825:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:23:38.761193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824763515033825:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:23:38.761217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564824763515033825:2331];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16 ... 
found; 2025-10-24T16:23:39.021719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.021732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.021735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.022505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.022516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.022519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.023094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.023111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.023113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.023191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.023205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.023208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.024046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.024060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.024062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.024781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.024798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.024804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.025455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.025469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.025471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.026065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.026078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.026080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.026725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.026738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.026740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.027351Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.027364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.027366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.028358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.028370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.028377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.028655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.028662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.028664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.029531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.029546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.029549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.030317Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:39.030405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.030416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.030419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.031110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.031132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.031134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:23:39.122480Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=NDYwNGUyOGUtOTI4ODE4ZGUtZmMyNTdmNjktMTI5MWI2YWE=, ActorId: [1:7564824763515033371:2316], ActorState: ExecuteState, TraceId: 01k8bggtt800b8b9rqzfr22tcs, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. |54.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> BsControllerConfig::AddDriveSerial ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 15047, MsgBus: 13132 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00159c/r3tmp/tmpBL2ZQd/pdisk_1.dat 2025-10-24T16:23:38.261417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.261452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.261646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:38.261694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:38.264505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.276228Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.276502Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824764287397384:2081] 1761323018199421 != 1761323018199424 TServer::EnableGrpc on GrpcPort 15047, node 1 2025-10-24T16:23:38.313504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-10-24T16:23:38.313517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.313518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.313553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13132 TClient is connected to server localhost:13132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.455558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.458635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:23:38.461773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.488685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:38.490582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:38.511797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.521593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.692272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764287399028:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.692297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.692444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764287399038:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.692452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.757409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.778955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.791388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.804842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.815426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.828874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.847587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.861051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.888419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824764287399898:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.888441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.888584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764287399903:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.888592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764287399904:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.888646Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.889445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:38.894656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:23:38.894836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6272: Got TEvUpdateAck for unknown txId 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:23:38.895009Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824764287399907:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:38.988217Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824764287399959:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:39.204766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:39.226919Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824768582367559:2527], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with RETURNING 2025-10-24T16:23:39.227104Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NDc5ZTZkNTUtYjVkODlmMTMtOTMxYTk3YjYtMTMyNzBjMGU=, ActorId: [1:7564824768582367550:2521], ActorState: ExecuteState, TraceId: 01k8bggtypayrcs3n70nq8tv10, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DeleteOn [GOOD] Test command err: Trying to start YDB, gRPC: 23080, MsgBus: 24967 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001595/r3tmp/tmpCICd4m/pdisk_1.dat 2025-10-24T16:23:38.334115Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.334821Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824761518050205:2081] 1761323018243260 != 1761323018243263 2025-10-24T16:23:38.334848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23080, node 1 2025-10-24T16:23:38.364305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.364339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.368223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.396066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.396077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.396079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.396115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24967 TClient is connected to server localhost:24967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:23:38.501477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.512053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:38.522554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.543070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.575825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.589253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.732313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761518051856:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.732338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.732505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761518051866:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.732513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.795303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.808661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.821258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.840766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.852873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.864716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.879048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.894934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.916733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824761518052728:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.916757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.916934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761518052733:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.916944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761518052734:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.916950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.917831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:38.926169Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824761518052737:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:39.013906Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824765813020085:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:39.235480Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824765813020389:2527], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with ON 2025-10-24T16:23:39.235932Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=YzJjMmYzNzAtNDE4NzBiNmQtNDQ0ZGIyMjctMzY3ODQwYmQ=, ActorId: [1:7564824765813020380:2521], ActorState: ExecuteState, TraceId: 01k8bggtz1fza9an5m6gmp7m5g, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:39.261399Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |54.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> KqpBatchUpdate::UnknownColumn [GOOD] >> KqpBatchUpdate::UpdateOn [GOOD] >> KqpBatchUpdate::Large_2 |54.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex >> BsControllerConfig::SelectAllGroups [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:39.248760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:39.248792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:39.248799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:39.248808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:39.248815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:39.248821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:39.248833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:39.248848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:39.249256Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:39.249615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:39.324532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:39.324565Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:39.340247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:39.340299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:39.340363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:39.353622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:39.353852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:39.354021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:39.354135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:39.356668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:39.356743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:39.357070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:39.357085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:39.357114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:39.357124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:39.357131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:39.357166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:39.364215Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:39.429946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-10-24T16:23:39.430044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:39.430123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:39.430132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:39.430216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:39.430232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:39.436290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:39.436382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:39.436476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:39.436492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:39.436498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:39.436505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:39.437231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:39.437245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:39.437251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:39.437606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:39.437618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:39.437630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:39.437637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-10-24T16:23:39.438422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:39.444197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:39.444295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:39.444569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:39.444617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:39.444627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:39.444709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:39.444718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:39.444758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:39.444773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:39.445463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:39.445473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ode 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-10-24T16:23:40.484357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:23:40.484360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-10-24T16:23:40.484500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:40.484513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:40.484519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:23:40.484525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:23:40.484529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:40.484626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:40.484709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:23:40.484714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:23:40.484719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:23:40.484723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:23:40.484732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-10-24T16:23:40.485306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:23:40.485327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:40.485334Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:23:40.485630Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-10-24T16:23:40.485867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:40.485950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-10-24T16:23:40.486174Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-10-24T16:23:40.487011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-10-24T16:23:40.487070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-10-24T16:23:40.487325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:23:40.487354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:40.487360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:40.487498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:40.487605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:23:40.487695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:40.487703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:23:40.487717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:40.492432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046678944:1 2025-10-24T16:23:40.492457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-10-24T16:23:40.492891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-10-24T16:23:40.492906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-10-24T16:23:40.492937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:40.493208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-10-24T16:23:40.493297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:23:40.493305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:23:40.493401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:23:40.493429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:23:40.493434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2103:3706] TestWaitNotification: OK eventTxId 104 2025-10-24T16:23:40.495929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:40.495996Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 1.21ms result status StatusPathDoesNotExist 2025-10-24T16:23:40.496052Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:40.496186Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:40.496202Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 20us result status StatusPathDoesNotExist 2025-10-24T16:23:40.496219Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 25943, MsgBus: 31395 2025-10-24T16:23:39.248809Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824764413682024:2187];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:39.248836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014dc/r3tmp/tmpzkyrpy/pdisk_1.dat 2025-10-24T16:23:39.349728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:39.353263Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:39.355331Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824764413681874:2081] 1761323019247290 != 1761323019247293 2025-10-24T16:23:39.358289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:39.358306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:39.362097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25943, node 1 2025-10-24T16:23:39.380058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:39.380075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:39.380077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2025-10-24T16:23:39.380121Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31395 TClient is connected to server localhost:31395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:39.480648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:39.488386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:39.511162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.519544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:39.543227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.571700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:23:39.590233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.729126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764413683526:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.729162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.729323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764413683536:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.729328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.787106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.809114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.819416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.829791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.852542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.876224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.889785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.905568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.946330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824764413684397:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.946370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.946535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764413684402:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.946542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764413684403:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.946547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.947501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:39.956066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:23:39.956171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824764413684406:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:23:40.016083Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824768708651754:3555] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:40.250711Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:40.402930Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824768708652067:2528], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At lambda, At function: Coalesce
:4:41: Error: At function: ==
:4:27: Error: At function: Member
:4:27: Error: Member not found: UnknownColumn 2025-10-24T16:23:40.403806Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=MzNjMDhkZmQtMWIwZjQzYTYtM2ZmNTkyOGItNThhNDY2YQ==, ActorId: [1:7564824768708652058:2522], ActorState: ExecuteState, TraceId: 01k8bggw35f468j8d2tr66dfgp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:40.423754Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824768708652076:2532], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:4:43: Error: At function: KiUpdateTable!
:4:43: Error: Column 'UnknownColumn' does not exist in table '/Root/Test'., code: 2017 2025-10-24T16:23:40.424560Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=MzNjMDhkZmQtMWIwZjQzYTYtM2ZmNTkyOGItNThhNDY2YQ==, ActorId: [1:7564824768708652058:2522], ActorState: ExecuteState, TraceId: 01k8bggw3qddrsnznnv77487xt, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> KqpBatchDelete::MultiStatement [GOOD] >> BsControllerConfig::OverlayMapCrossReferences >> KqpBatchUpdate::HasTxControl [GOOD] >> KqpBatchUpdate::Returning [GOOD] >> KqpBatchDelete::Large_3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 24836, MsgBus: 11456 2025-10-24T16:23:38.161978Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824760472116302:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:38.162178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014eb/r3tmp/tmpOEaHaL/pdisk_1.dat 2025-10-24T16:23:38.223945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:38.241851Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24836, node 1 2025-10-24T16:23:38.263743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.263754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.263756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.263793Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11456 2025-10-24T16:23:38.283750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.283776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.285283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11456 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.441262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:38.444312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:23:38.448538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:38.457479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.525580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:23:38.553026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.573605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:39.148016Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:39.709415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764767085184:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.709441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.709752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764767085194:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.709760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.887576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.917182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.950159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.988441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.014315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.038369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.061373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.082349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.138548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824769062053353:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.138587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.138760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769062053358:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.138770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769062053359:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.138777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.140585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:40.153031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:23:40.153115Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824769062053362:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:23:40.256152Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824769062053416:3560] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:40.718057Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824769062053722:2530], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with ON 2025-10-24T16:23:40.718749Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NDkzNjM2ZGUtOWU4YTVhOWQtZGM0OTI2NzAtN2M4NDQ4ZDQ=, ActorId: [1:7564824769062053713:2524], ActorState: ExecuteState, TraceId: 01k8bggwdaemr8awz3wh2ndqkf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-10-24T16:23:39.298191Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:39.298508Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:39.298602Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:39.299005Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:39.299093Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:39.299159Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.299166Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.299208Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:39.301915Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:39.301952Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:39.301986Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:39.302004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.302017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.302028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::UniqueIndexValidationFailsInsideShard [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: 
[1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.671530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.671549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.671555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.671561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.671568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.671573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.671583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.671597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.671707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.671766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.687402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.687428Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.687507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.689780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.689861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.689892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.692577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.692701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.692823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.692892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.693920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.693962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.694187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.694199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.694231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.694240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.694246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.694272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.695698Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.710755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.710837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.710892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.710899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.710944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.710972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.711624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.711663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.711707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.711714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.711718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.711722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.712087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.712097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.712101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.712358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.712365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.712369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.712374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.712780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.713173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.713204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at 
step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.713369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.713388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... Z node 416 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:23:35.777193Z node 416 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2470: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 1003, txId# 281474976710761 2025-10-24T16:23:35.777210Z node 416 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2473: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 1003, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 3], IndexType: EIndexTypeGlobalUnique, IndexName: Index, IndexColumn: index_key_first_part, IndexColumn: index_key_second_part, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: One of the shards report BUILD_ERROR
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha) at Filling stage, process has to be canceled, shardId: 72075186233409547, shardIdx: 72057594046678944:2, SubscribersCount: 1, CreateSender: [416:422:2394], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 1, DoneShards: 0, ShardsInProgress: 72057594046678944:3, ShardsInProgress: 72057594046678944:2, Processed: UploadRows: 3 UploadBytes: 80 ReadRows: 6 ReadBytes: 144 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710761 2025-10-24T16:23:35.777469Z node 416 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejection_Unlocking 2025-10-24T16:23:35.777487Z node 416 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejection_Unlocking TBuildInfo{ IndexBuildId: 1003, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 3], IndexType: EIndexTypeGlobalUnique, IndexName: Index, IndexColumn: index_key_first_part, IndexColumn: index_key_second_part, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: One of the shards report BUILD_ERROR
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha) at Filling stage, process has to be canceled, shardId: 72075186233409547, shardIdx: 72057594046678944:2, SubscribersCount: 1, CreateSender: [416:422:2394], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 1, DoneShards: 0, ShardsInProgress: 72057594046678944:3, ShardsInProgress: 72057594046678944:2, Processed: UploadRows: 3 UploadBytes: 80 ReadRows: 6 ReadBytes: 144 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:23:35.777493Z node 416 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-10-24T16:23:35.777740Z node 416 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejected 2025-10-24T16:23:35.777758Z node 416 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejected TBuildInfo{ IndexBuildId: 1003, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 3], IndexType: EIndexTypeGlobalUnique, IndexName: Index, IndexColumn: index_key_first_part, IndexColumn: index_key_second_part, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: One of the shards report BUILD_ERROR
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha) at Filling stage, process has to be canceled, shardId: 72075186233409547, shardIdx: 72057594046678944:2, SubscribersCount: 1, CreateSender: [416:422:2394], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 1, DoneShards: 0, ShardsInProgress: 72057594046678944:3, ShardsInProgress: 72057594046678944:2, Processed: UploadRows: 3 UploadBytes: 80 ReadRows: 6 ReadBytes: 144 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:23:35.777764Z node 416 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 1003, subscribers count# 1 2025-10-24T16:23:35.777778Z node 416 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:23:35.777784Z node 416 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [416:423:2395] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:23:35.777859Z node 416 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-10-24T16:23:35.777919Z node 416 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:103: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 1003 Issues { message: "One of the shards report BUILD_ERROR
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha)\n at Filling stage, process has to be canceled, shardId: 72075186233409547, shardIdx: 72057594046678944:2" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:2 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha)\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 3 ReadBytes: 64 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/Table" index { name: "Index" index_columns: "index_key_first_part" index_columns: "index_key_second_part" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 1003 Issues { message: "One of the shards report BUILD_ERROR
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha)\n at Filling stage, process has to be canceled, shardId: 72075186233409547, shardIdx: 72057594046678944:2" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:2 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha)\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 3 ReadBytes: 64 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/Table" index { name: "Index" index_columns: "index_key_first_part" index_columns: "index_key_second_part" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-10-24T16:23:35.777992Z node 416 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:35.778019Z node 416 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 31us result status StatusSuccess 2025-10-24T16:23:35.778135Z node 416 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index_key_first_part" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index_key_second_part" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 
MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 12464, MsgBus: 20960 2025-10-24T16:23:37.750080Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824759403481956:2085];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:37.750326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0015b0/r3tmp/tmpAbrU0P/pdisk_1.dat 2025-10-24T16:23:38.227368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:38.267312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.275986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.284183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.368647Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12464, node 1 2025-10-24T16:23:38.519391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.519406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.519409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.519460Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:38.527905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:38.764998Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20960 TClient is connected to server localhost:20960 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:39.198436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:39.212057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:39.216723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.287692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.357528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.392651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.919737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824767993418156:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.919792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.920055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824767993418166:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.920064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.018060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.059088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.070413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.082819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.112791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.142981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.168940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.193622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.285223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824772288386332:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.285257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.285559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824772288386337:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.285568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824772288386338:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.285572Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.287036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:40.302314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:23:40.302428Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824772288386341:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:40.370766Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824772288386393:3558] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:41.089744Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824776583353997:2531], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with RETURNING 2025-10-24T16:23:41.090236Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=OWMwNmNmOWUtMmUzM2RjZGEtMzExODUxZjAtZjAzMGIxOWU=, ActorId: [1:7564824776583353988:2525], ActorState: ExecuteState, TraceId: 01k8bggwrw1zknqj3d9z8efs8v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 20711, MsgBus: 15756 2025-10-24T16:23:38.067696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824760852691805:2163];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:38.067857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:38.120860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0015a7/r3tmp/tmpyHfWb5/pdisk_1.dat 2025-10-24T16:23:38.447931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:38.450473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.450503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.452491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.520613Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.524931Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824756557724383:2081] 1761323018054839 != 1761323018054842 TServer::EnableGrpc on GrpcPort 20711, node 1 2025-10-24T16:23:38.648534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.648548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.648549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.648591Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:38.717229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15756 2025-10-24T16:23:39.072624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15756 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:39.193418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:39.230195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:23:39.332797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.434081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.459423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.672084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824765147660630:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.672118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.672244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824765147660640:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.672247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.893963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.933202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.954428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.975798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.004584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.029954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.061476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.081504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.124559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824769442628794:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.124619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.124770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769442628804:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.124776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769442628805:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.124792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.125653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:40.130998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824769442628808:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:40.224504Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824769442628860:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:40.740268Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824769442629166:2530], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:40.740366Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZGYwMDA3OTYtNGMwM2E4OWItZGNmNzQyOGMtZjBjM2E2MTA=, ActorId: [1:7564824769442629157:2524], ActorState: ExecuteState, TraceId: 01k8bggwdr18xt09zzv0vteah0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:40.743938Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824769442629170:2532], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:40.744534Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZGYwMDA3OTYtNGMwM2E4OWItZGNmNzQyOGMtZjBjM2E2MTA=, ActorId: [1:7564824769442629157:2524], ActorState: ExecuteState, TraceId: 01k8bggwe5ayaze1cxprhg250j, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:40.751137Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824769442629174:2534], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:40.751740Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZGYwMDA3OTYtNGMwM2E4OWItZGNmNzQyOGMtZjBjM2E2MTA=, ActorId: [1:7564824769442629157:2524], ActorState: ExecuteState, TraceId: 01k8bggwec9zkfhgp5bvyjcb85, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:40.759635Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824769442629178:2536], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:40.760889Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZGYwMDA3OTYtNGMwM2E4OWItZGNmNzQyOGMtZjBjM2E2MTA=, ActorId: [1:7564824769442629157:2524], ActorState: ExecuteState, TraceId: 01k8bggwem6jj4dwyc7abcrnm4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:40.767088Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824769442629182:2538], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:40.767584Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZGYwMDA3OTYtNGMwM2E4OWItZGNmNzQyOGMtZjBjM2E2MTA=, ActorId: [1:7564824769442629157:2524], ActorState: ExecuteState, TraceId: 01k8bggwew76gsvmx0p3vp3x3x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> BsControllerConfig::OverlayMapCrossReferences [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 15669, MsgBus: 11484 2025-10-24T16:23:38.591241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824761180036365:2246];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:38.591255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:38.598870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001565/r3tmp/tmpVKmcEw/pdisk_1.dat 2025-10-24T16:23:38.953926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.953954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:39.092428Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:39.094285Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824761180036156:2081] 1761323018585862 != 1761323018585865 2025-10-24T16:23:39.096848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15669, node 1 2025-10-24T16:23:39.152409Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:39.204096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:39.204112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:39.204114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:39.204167Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11484 TClient is connected to server localhost:11484 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:39.491979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:39.512179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:39.533179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.643803Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:39.676198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.737361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.756242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:40.056491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769769972403:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.056519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.056661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769769972412:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.056669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.420722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.440300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.468857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.491775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.508779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.530892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.544664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.558496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.609655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824769769973275:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.609673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.609831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769769973280:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.609841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824769769973281:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.609891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.610463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:40.620143Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824769769973284:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:40.677087Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824769769973338:3559] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:41.111029Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=MWUwNDM0M2QtYmVmMGE2MDctZDYzMDNmYWYtNTczOTE0YTA=, ActorId: [1:7564824774064940931:2524], ActorState: ExecuteState, TraceId: 01k8bggwrt9fdez4rp1j5zqwth, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:35.985018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:35.985043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:35.985048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:35.985055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:35.985062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:35.985066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:35.985075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:35.985087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:35.986322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:35.987261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:36.278275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 
2025-10-24T16:23:36.278296Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:36.306338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:36.306371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:36.307570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:36.312309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:36.312966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:36.313572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:36.313634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:36.315666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:36.315709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:36.316751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:36.316761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:36.316782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:36.316790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:36.316795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:36.316815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.319710Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:36.548697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:36.548762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.548818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:36.548826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:36.551593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:36.551608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:36.552928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:36.552973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:36.553025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.553033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:36.553038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:36.553042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:36.553627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.553636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:36.553640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:36.554501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.554509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:36.554518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:36.554525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:36.558295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2025-10-24T16:23:36.561363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:36.561424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:36.561615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:36.561635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:36.561642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:36.561686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:36.561692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:36.561716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:36.561725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:36.562153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:36.562159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_b ... 
TransactionResult> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:41.630475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:41.632286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:41.632302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:41.632355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:41.632392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:41.632396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:23:41.632400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-10-24T16:23:41.632514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:41.632522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1091: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-10-24T16:23:41.632542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:41.632549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-10-24T16:23:41.632554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 129 -> 240 2025-10-24T16:23:41.632713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:41.632723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:41.632727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:41.632732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-10-24T16:23:41.632737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:41.632841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:41.632848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:41.632851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:41.632854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:23:41.632856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:23:41.632864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-10-24T16:23:41.635212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:41.635228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:41.635320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:41.635375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:41.635379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:41.635383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:41.635387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:41.635392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-10-24T16:23:41.635411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:413:2379] message: TxId: 103 2025-10-24T16:23:41.635416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:41.635420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:23:41.635424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:23:41.635441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:41.636096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:41.636110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:41.636242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:41.636260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:41.636695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:41.636707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-10-24T16:23:41.636723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:41.636729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:667:2599] 2025-10-24T16:23:41.636979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:41.637217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:41.637260Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 55us result status StatusSuccess 2025-10-24T16:23:41.637388Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: 
"quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement >> KqpBatchDelete::HasTxControl [GOOD] |54.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |54.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |54.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions >> KqpBatchUpdate::NotIdempotent >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Query [GOOD] >> KqpBatchDelete::ManyPartitions_1 >> BsControllerConfig::ReassignGroupDisk [GOOD] >> BsControllerConfig::PDiskCreate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 15104, MsgBus: 29210 2025-10-24T16:23:40.427511Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824770426106656:2258];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:40.427576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014b2/r3tmp/tmpCVNw5g/pdisk_1.dat 2025-10-24T16:23:40.538630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:40.538656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:40.545446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:40.560018Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:40.563383Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15104, node 1 2025-10-24T16:23:40.700026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:40.700040Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:40.700042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:40.700081Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29210 2025-10-24T16:23:40.796919Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:23:40.899931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:40.902993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:40.913076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:40.938573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.063604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.080199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.289288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774721075368:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.289311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.289402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774721075378:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.289409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.367463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.382380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.391357Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:41.397188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.414034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.427436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.446307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.471503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.509664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.551681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774721076246:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.551768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.551977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774721076256:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.551989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774721076257:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.552010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.553045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:41.558285Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824774721076260:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:41.617075Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824774721076312:3562] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:41.987727Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=ZjFjMmVmMmItNmZmZDBkZS1hZTIwZWUwLWU1ZjQ4MGFl, ActorId: [1:7564824774721076607:2522], ActorState: ExecuteState, TraceId: 01k8bggxmd05qwhek6agphjh48, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest |54.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |54.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |54.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |54.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream >> Viewer::CommitOffsetTest [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler >> ReadSessionImplTest::CommonHandler [GOOD] |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:290:2068] recipient: [1:277:2079] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:290:2068] recipient: [1:277:2079] Leader for TabletID 72057594037932033 is [1:292:2081] sender: [1:293:2068] recipient: [1:277:2079] 2025-10-24T16:23:39.005542Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:39.005849Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:39.005942Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:39.006291Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:39.006326Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:39.006363Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# 
NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.006370Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.006415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:39.007706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:39.007738Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:39.007767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:39.007787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.007799Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.007809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:292:2081] sender: [1:314:2068] recipient: [1:22:2069] 2025-10-24T16:23:39.018382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:39.018436Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.040070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.040126Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.040143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.040156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.040186Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.040193Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.040199Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.040206Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.051343Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.051410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.061705Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.061758Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:39.061961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:39.061966Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:39.061998Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:39.062010Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:39.063764Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-10-24T16:23:39.063897Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-10-24T16:23:39.063905Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-10-24T16:23:39.063909Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-10-24T16:23:39.063913Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-10-24T16:23:39.063917Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-10-24T16:23:39.063922Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-10-24T16:23:39.063926Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-10-24T16:23:39.063930Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-10-24T16:23:39.063934Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-10-24T16:23:39.063939Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-10-24T16:23:39.063943Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new 
pdisk PDiskId# 11:1000 Path# /dev/disk 2025-10-24T16:23:39.063947Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 12:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-10-24T16:23:39.076060Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:290:2068] recipient: [13:277:2079] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:290:2068] recipient: [13:277:2079] Leader for TabletID 72057594037932033 is [13:292:2081] sender: [13:293:2068] recipient: [13:277:2079] 2025-10-24T16:23:41.234026Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:41.234239Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:41.234325Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:41.234568Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:41.234608Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:41.234652Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:41.234658Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:41.234726Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:41.235962Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:41.236000Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:41.236037Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:41.236058Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:41.236075Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:41.236085Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:292:2081] sender: [13:314:2068] recipient: [13:22:2069] 2025-10-24T16:23:41.249063Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:41.249121Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:41.273313Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:41.273376Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:41.273395Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:41.273410Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:41.273443Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:41.273451Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:41.273457Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:41.273465Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:41.284604Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:41.284666Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:41.296244Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:41.296305Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:41.296526Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:41.296537Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:41.296589Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:41.296597Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:41.296890Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } 
Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-10-24T16:23:41.297036Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-10-24T16:23:41.297045Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-10-24T16:23:41.297053Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-10-24T16:23:41.297057Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-10-24T16:23:41.297062Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-10-24T16:23:41.297068Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-10-24T16:23:41.297072Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-10-24T16:23:41.297076Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-10-24T16:23:41.297080Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-10-24T16:23:41.297085Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-10-24T16:23:41.297091Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-10-24T16:23:41.297096Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 24:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-10-24T16:23:41.313103Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:213:2077] 2025-10-24T16:23:39.660109Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:39.660419Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:39.660513Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 
2025-10-24T16:23:39.660824Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:39.660932Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:39.660988Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.660995Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.661036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:39.662167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:39.662196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:39.662228Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:39.662246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.662259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.662272Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:248:2066] recipient: [1:20:2067] 2025-10-24T16:23:39.672949Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:39.673010Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.696149Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.696203Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.696221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.696238Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.696273Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.696285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.696292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.696304Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.712072Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.712128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.724181Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.724246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:39.724477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:39.724486Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:39.724533Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:39.724549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:39.727645Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-10-24T16:23:39.727816Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-10-24T16:23:39.727826Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-10-24T16:23:39.727831Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-10-24T16:23:39.727836Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-10-24T16:23:39.727841Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-10-24T16:23:39.727862Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-10-24T16:23:39.727867Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-10-24T16:23:39.727872Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-10-24T16:23:39.727877Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 
2025-10-24T16:23:39.727882Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-10-24T16:23:39.727887Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-10-24T16:23:39.727895Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-10-24T16:23:39.727900Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-10-24T16:23:39.727905Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-10-24T16:23:39.727909Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-10-24T16:23:39.727914Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-10-24T16:23:39.727919Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-10-24T16:23:39.727924Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-10-24T16:23:39.727928Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-10-24T16:23:39.727941Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-10-24T16:23:39.727946Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-10-24T16:23:39.727950Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-10-24T16:23:39.727955Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-10-24T16:23:39.727960Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-10-24T16:23:39.727965Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-10-24T16:23:39.727969Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-10-24T16:23:39.727974Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-10-24T16:23:39.727979Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-10-24T16:23:39.727984Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-10-24T16:23:39.727989Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:205:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:205:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:205:2077] 2025-10-24T16:23:41.487809Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:41.488031Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} 
StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:41.488117Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:41.488338Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:41.488386Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:41.488437Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:41.488444Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:41.488494Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:41.489622Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:41.489657Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:41.489693Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:41.489711Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:41.489726Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:41.489736Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:248:2066] recipient: [11:20:2067] 2025-10-24T16:23:41.501474Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:41.501528Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:41.528089Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:41.528148Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:41.528168Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:41.528183Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:41.528217Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:41.528225Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:41.528232Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} 
Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:41.528242Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:41.539236Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:41.539292Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:41.549661Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:41.549715Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:41.549898Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:41.549905Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:41.549940Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:41.549948Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:41.550178Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-10-24T16:23:41.550276Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-10-24T16:23:41.550281Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-10-24T16:23:41.550285Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3 2025-10-24T16:23:41.550289Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1 2025-10-24T16:23:41.550293Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 12:1001 Path# /dev/disk2 2025-10-24T16:23:41.550298Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3 2025-10-24T16:23:41.550302Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1 2025-10-24T16:23:41.550306Z node 11 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2 2025-10-24T16:23:41.550310Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3 2025-10-24T16:23:41.550314Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1 2025-10-24T16:23:41.550318Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-10-24T16:23:41.550327Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3 2025-10-24T16:23:41.550331Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1 2025-10-24T16:23:41.550335Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-10-24T16:23:41.550339Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 15:1002 Path# /dev/disk3 2025-10-24T16:23:41.550343Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-10-24T16:23:41.550352Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2 2025-10-24T16:23:41.550356Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3 2025-10-24T16:23:41.550360Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1 2025-10-24T16:23:41.550364Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-10-24T16:23:41.550369Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3 2025-10-24T16:23:41.550373Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1 2025-10-24T16:23:41.550381Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-10-24T16:23:41.550386Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-10-24T16:23:41.550391Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1 2025-10-24T16:23:41.550395Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2 2025-10-24T16:23:41.550400Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-10-24T16:23:41.550404Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1 2025-10-24T16:23:41.550408Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-10-24T16:23:41.550412Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3 >> ExternalBlobsMultipleChannels::WithCompaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-10-24T16:23:06.604354Z 
:ReadSession INFO: Random seed for debugging is 1761322986604348 2025-10-24T16:23:06.806718Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824623749913300:2264];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:06.806791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:06.835531Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:06.836531Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824626204913013:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:06.836553Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0013a3/r3tmp/tmpHaGGM1/pdisk_1.dat 2025-10-24T16:23:06.843386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:06.843485Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:06.875892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:06.904139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:06.904172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:06.913448Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:06.920153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:06.920961Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:06.930592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:06.930611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:06.939898Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:06.949565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61864, node 1 2025-10-24T16:23:06.999046Z INFO: TTestServer started on Port 4376 GrpcPort 61864 2025-10-24T16:23:06.996260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/0013a3/r3tmp/yandexCs4OCp.tmp 2025-10-24T16:23:06.996274Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/0013a3/r3tmp/yandexCs4OCp.tmp 2025-10-24T16:23:06.996352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/0013a3/r3tmp/yandexCs4OCp.tmp 2025-10-24T16:23:06.996375Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4376 2025-10-24T16:23:07.075895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:61864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:07.108852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:23:07.419072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628044881351:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.419095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.422315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628044881386:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.422320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628044881387:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.422336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.423274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:07.439171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824628044881390:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-10-24T16:23:07.488754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:07.502815Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824628044881520:2693] txid# 281474976720663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.520807Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824630499880623:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:23:07.521594Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=YzA0YWFiMjEtOTM4MWE1OWUtMWM5MmY3OTQtZWMxMWI2MDY=, ActorId: [2:7564824630499880580:2292], ActorState: ExecuteState, TraceId: 01k8bgfvym892rvtvhp51np7n1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:23:07.522275Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:23:07.523294Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824628044881551:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:23:07.524496Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=MTFmZDI5YmItZWViOWY2NjktMWE2NWIwMmMtNGEzZDJjZGQ=, ActorId: [1:7564824628044881345:2320], ActorState: ExecuteState, TraceId: 01k8bgfvws6g7kcfavdgp6sqey, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:23:07.524611Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness o ... "" } BlobsFromDisk: 0 BlobsFromCache: 1 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 2 } 2025-10-24T16:23:43.325854Z node 7 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1277: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset 3 2025-10-24T16:23:43.325862Z node 7 :PQ_READ_PROXY DEBUG: partition_actor.cpp:901: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid bda388fc-23339124-631b41e2-b3a1ea1d has messages 1 2025-10-24T16:23:43.325893Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1940: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 read done: guid# bda388fc-23339124-631b41e2-b3a1ea1d, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-10-24T16:23:43.325901Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2102: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 response to read: guid# bda388fc-23339124-631b41e2-b3a1ea1d 2025-10-24T16:23:43.326010Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2145: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 Process answer. Aval parts: 0 DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-10-24T16:23:42.181000Z WriteTime: 2025-10-24T16:23:42.186000Z Ip: "ipv6:[::1]:35536" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:35536" } } } } 2025-10-24T16:23:43.328051Z :INFO: [/Root] [/Root] [a664c20b-c690624b-5b6ef04-ed5f94b2] Closing read session. 
Close timeout: 3.000000s 2025-10-24T16:23:43.328060Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-10-24T16:23:43.328067Z :INFO: [/Root] [/Root] [a664c20b-c690624b-5b6ef04-ed5f94b2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1356 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:23:43.328241Z :INFO: [/Root] [/Root] [a664c20b-c690624b-5b6ef04-ed5f94b2] Closing read session. Close timeout: 0.000000s 2025-10-24T16:23:43.328249Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-10-24T16:23:43.328254Z :INFO: [/Root] [/Root] [a664c20b-c690624b-5b6ef04-ed5f94b2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1356 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:23:43.328276Z :NOTICE: [/Root] [/Root] [a664c20b-c690624b-5b6ef04-ed5f94b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:23:43.333719Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_16256977668307717203_v1 2025-10-24T16:23:43.333743Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [7:7564824773306413021:2487] destroyed 2025-10-24T16:23:43.333761Z node 8 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_7_1_16256977668307717203_v1 2025-10-24T16:23:43.333038Z node 7 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 grpc read done: success# 1, data# { read { } } 2025-10-24T16:23:43.333067Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 grpc closed 2025-10-24T16:23:43.333081Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_7_1_16256977668307717203_v1 is DEAD 2025-10-24T16:23:43.333958Z node 7 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [7:7564824773306413018:2484] disconnected. 2025-10-24T16:23:43.333965Z node 7 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [7:7564824773306413018:2484] disconnected; active server actors: 1 2025-10-24T16:23:43.333969Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [7:7564824773306413018:2484] client user disconnected session shared/user_7_1_16256977668307717203_v1 2025-10-24T16:23:43.424946Z node 8 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:43.424962Z node 8 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:43.424965Z node 8 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:23:43.928821Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:43.928828Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:43.928832Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:43.939907Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:43.951928Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:43.972153Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:43.975954Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. 
Commit offset: 31 2025-10-24T16:23:43.976388Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:43.976393Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:43.976396Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:43.985215Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:43.995912Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:43.995977Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.000021Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:44.000923Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-10-24T16:23:44.003813Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-10-24T16:23:44.003834Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-10-24T16:23:44.008023Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:44.008033Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-10-24T16:23:44.008038Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-10-24T16:23:44.008046Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-10-24T16:23:44.024163Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.024168Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.024171Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:44.040147Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-10-24T16:23:44.048078Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:44.048144Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.051908Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-10-24T16:23:44.052095Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.055900Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:44.056060Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:44.056070Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-10-24T16:23:44.056082Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-10-24T16:23:44.056485Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.056489Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.056492Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:44.068004Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-10-24T16:23:44.069138Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-10-24T16:23:44.069194Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.127909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:44.128013Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-10-24T16:23:44.128037Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-10-24T16:23:44.128054Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> KqpBatchDelete::TableWithIndex [GOOD] >> KqpRbo::Aggregation >> KqpRbo::Select >> KqpRbo::CrossInnerJoin >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> KqpRbo::Filter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::CommitOffsetTest [GOOD] Test command err: 2025-10-24T16:23:20.711624Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:705:2358], Scheduled retry for error: {
: Error: Scheme service not found } 2025-10-24T16:23:20.723784Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:20.724256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:20.978023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:20.978713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:20.979188Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:709:2418], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:20.979254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:20.979285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:20.980375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:20.980402Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:21.098474Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:21.150268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:21.151980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:21.152254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:21.152265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:21.234866Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:21.235012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:21.235153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:21.353307Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:23:21.381038Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:23:21.457056Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 61947, node 1 TClient is connected to server localhost:10065 2025-10-24T16:23:21.944302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:21.944324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:21.944329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:21.944430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration Request timer = 0.01292279906 BASE_PERF = 1.326700805 2025-10-24T16:23:22.923748Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7564824693310165235:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:22.923838Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:22.932148Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:22.941667Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65481, node 3 2025-10-24T16:23:22.956060Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:22.956072Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:22.956075Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:22.956126Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:22.995420Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:23:22.998947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:23.025096Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:23.025131Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:23.028861Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:23.061299Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:23.064701Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-10-24T16:23:23.066340Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-10-24T16:23:23.066820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:23.067709Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:23:23.096435Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:23.341598Z node 3 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-10-24T16:23:23.341623Z node 3 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-10-24T16:23:23.350207Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824697605133007:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:23.350231Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:23.350363Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824697605133019:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:23.351631Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_crea ... uence_numbers: 19984 offsets: 39983 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.321973Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19984 2025-10-24T16:23:42.321983Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19985 offsets: 39984 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.321985Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19985 2025-10-24T16:23:42.321995Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19986 offsets: 39985 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.321997Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19986 2025-10-24T16:23:42.322007Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19987 offsets: 39986 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322009Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19987 2025-10-24T16:23:42.322020Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19988 offsets: 39987 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322022Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19988 2025-10-24T16:23:42.322033Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19989 offsets: 39988 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322035Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19989 2025-10-24T16:23:42.322045Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19990 offsets: 39989 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322047Z :DEBUG: [] MessageGroupId [producer2] SessionId 
[producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19990 2025-10-24T16:23:42.322057Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19991 offsets: 39990 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322059Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19991 2025-10-24T16:23:42.322069Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19992 offsets: 39991 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322071Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19992 2025-10-24T16:23:42.322080Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19993 offsets: 39992 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322082Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19993 2025-10-24T16:23:42.322091Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19994 offsets: 39993 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322093Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19994 2025-10-24T16:23:42.322103Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19995 offsets: 39994 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322104Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19995 2025-10-24T16:23:42.322114Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19996 offsets: 39995 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322117Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19996 2025-10-24T16:23:42.322127Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19997 offsets: 39996 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322129Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19997 2025-10-24T16:23:42.322138Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19998 
offsets: 39997 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322140Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19998 2025-10-24T16:23:42.322149Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 19999 offsets: 39998 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322151Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 19999 2025-10-24T16:23:42.322162Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session got write response: sequence_numbers: 20000 offsets: 39999 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 3 } 2025-10-24T16:23:42.322164Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: acknoledged message 20000 2025-10-24T16:23:42.346534Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [6:7564824779464420417:2489], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:23:42.347364Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=6&id=ZjdhM2NjMjctZDJlYjkxMS1iODZiMzc3NS1mODQwMWM1Mw==, ActorId: [6:7564824779464420415:2488], ActorState: ExecuteState, TraceId: 01k8bggy04egyqjyr2qqjtaxy0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:23:42.347514Z node 6 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:23:42.379114Z :INFO: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session will now close 2025-10-24T16:23:42.379136Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: aborting 2025-10-24T16:23:42.379288Z :INFO: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:23:42.379298Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|fbf7a61f-cd8340d2-571aa612-6f908efe_0] Write session: destroy 2025-10-24T16:23:43.200723Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:23:43.200738Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:43.353208Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [6:7564824783759387730:2496], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:23:43.353812Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=6&id=NDRmYmUwZTQtNmM3ZDRkMzEtNjhjY2MxYWMtYTJjYzQyOWU=, ActorId: [6:7564824783759387728:2495], ActorState: ExecuteState, TraceId: 01k8bggyzmf2dy838jtefasjxy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:23:43.353939Z node 6 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:23:43.396042Z node 6 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [6:7564824783759387741:5465], for# username, access# DescribeSchema 2025-10-24T16:23:43.456794Z node 6 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [6:7564824783759387752:5468], for# username, access# DescribeSchema |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> KqpBatchUpdate::NotIdempotent [GOOD] >> KqpBatchUpdate::MultiStatement [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 12303, MsgBus: 26890 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00148f/r3tmp/tmpq5QvtG/pdisk_1.dat 2025-10-24T16:23:41.172947Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824775294449305:2254];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:41.173093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:41.225399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:41.225427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:41.226283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12303, node 1 2025-10-24T16:23:41.235486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:41.235586Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 
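The topic read-session trace at the top of this log ("ReadSizeBudget = 52428800, ReadSizeServerDelta = 0", "After sending read request") reflects a byte-budget flow-control scheme: the client grants the server a fixed read window, received data is charged against it, and the consumed bytes are re-granted with the next read request. Below is a minimal sketch of that accounting only; all names are hypothetical and this is not the YDB SDK API.

```python
# Minimal model of a byte-budget flow-control loop for a topic read session.
# Class and method names are illustrative, not taken from any real SDK.

class ReadBudget:
    def __init__(self, budget_bytes: int = 52_428_800):  # 50 MiB, as in the log
        self.budget = budget_bytes          # bytes the server may still send
        self.server_delta = 0               # bytes consumed but not yet re-granted

    def on_data_received(self, size_bytes: int) -> None:
        # Data delivered to the client consumes the outstanding budget.
        self.budget -= size_bytes
        self.server_delta += size_bytes

    def next_read_request(self) -> int:
        # Re-grant the consumed bytes with the next read request, restoring
        # the full window (budget back to 50 MiB, delta back to 0).
        grant = self.server_delta
        self.budget += grant
        self.server_delta = 0
        return grant


budget = ReadBudget()
budget.on_data_received(8)          # "Number of messages 1, size 8 bytes"
grant = budget.next_read_request()  # re-grant 8 bytes before the next read
assert budget.budget == 52_428_800 and budget.server_delta == 0
```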
2025-10-24T16:23:41.235922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824775294449078:2081] 1761323021166632 != 1761323021166635 2025-10-24T16:23:41.248061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:41.248080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:41.248083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:41.248127Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26890 TClient is connected to server localhost:26890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:23:41.320777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:41.328439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:41.418021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:41.437654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.501993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:41.554037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.585953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.732109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824775294450723:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.732160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.732338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824775294450733:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.732343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.790787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.808485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.826426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.844852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.858405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.872020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.905963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.928561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.952918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824775294451595:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.952953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.953123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824775294451600:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.953134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824775294451601:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.953139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.954059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:41.958380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824775294451604:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:42.023278Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824779589418952:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:42.175912Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:42.372992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.389471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.410275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.810893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) >> KqpBatchDelete::SimpleOnePartition [GOOD] >> ObjectStorageListingTest::ListingNoFilter |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |54.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |54.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |54.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 28387, MsgBus: 15727 2025-10-24T16:23:43.670141Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824785327696002:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:43.670158Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00146e/r3tmp/tmpjNAd7K/pdisk_1.dat 2025-10-24T16:23:44.094106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:44.096250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:44.096279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:44.100234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:44.104272Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28387, node 1 2025-10-24T16:23:44.136475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:44.136486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:44.136488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:44.136537Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:44.244669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15727 TClient is connected to server localhost:15727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:44.658203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
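A pattern that recurs throughout these test logs is lazy bootstrap of the default resource pool: "Resource pool default not found" (NOT_FOUND), a scheduled retry, creation of the pool, a "doublechecking" pass, and finally "path exist, request accepts it" when a concurrent creator got there first. A rough create-if-missing sketch of that sequence is shown below; the function and exception names are hypothetical and do not correspond to YDB internals.

```python
# Illustrative create-if-missing bootstrap with bounded retries, mirroring the
# NOT_FOUND -> create -> doublecheck -> "path exist, request accepts it" flow.

import time

class NotFound(Exception): pass
class AlreadyExists(Exception): pass

def ensure_default_pool(fetch, create, retries: int = 5, delay_s: float = 0.1):
    for attempt in range(retries):
        try:
            return fetch("default")          # happy path: pool already exists
        except NotFound:
            pass
        try:
            create("default")                # lazily create it on first use
        except AlreadyExists:
            pass                             # a concurrent creator won the race
        time.sleep(delay_s * (attempt + 1))  # back off before double-checking
    raise TimeoutError("default resource pool did not become available")
```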
2025-10-24T16:23:44.672484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:44.718833Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:44.725647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.918785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:45.032846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:45.062554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:45.261903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824793917632195:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.261940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.266015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824793917632205:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.266063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.424915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.434087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.444542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.457738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.471833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.485373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.501551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.519311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.548420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824793917633072:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.548449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.548530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824793917633077:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.548539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824793917633078:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.548546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.549414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:45.553762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:23:45.553966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824793917633081:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:45.656751Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824793917633133:3558] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:45.916315Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824793917633439:2529], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2025-10-24T16:23:45.917180Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NzU0NDQzYmQtODI5YTIxNzItZDY5OWE3NjgtNWQwMDgxM2E=, ActorId: [1:7564824793917633430:2523], ActorState: ExecuteState, TraceId: 01k8bgh1fm43v9mk30tb82pp1z, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:45.922876Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824793917633443:2531], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2025-10-24T16:23:45.922966Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NzU0NDQzYmQtODI5YTIxNzItZDY5OWE3NjgtNWQwMDgxM2E=, ActorId: [1:7564824793917633430:2523], ActorState: ExecuteState, TraceId: 01k8bgh1fy820nj2g38w157pyb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:45.926910Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824793917633447:2533], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:51: Error: Batch update is only supported for idempotent updates. 2025-10-24T16:23:45.927027Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NzU0NDQzYmQtODI5YTIxNzItZDY5OWE3NjgtNWQwMDgxM2E=, ActorId: [1:7564824793917633430:2523], ActorState: ExecuteState, TraceId: 01k8bgh1g4ahy7ddhsr894tq7h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 32292, MsgBus: 24327 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00157e/r3tmp/tmp1EQ0xx/pdisk_1.dat 2025-10-24T16:23:37.991200Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824758309312613:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:37.991640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:38.087399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:38.087799Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:38.087999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824758309312565:2081] 1761323017988902 != 1761323017988905 TServer::EnableGrpc on GrpcPort 32292, node 1 2025-10-24T16:23:38.097012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.097038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.099606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.103879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.103890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.103892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.103930Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24327 TClient is connected to server localhost:24327 WaitRootIsUp 'Root'... 
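The KqpBatchUpdate::NotIdempotent run above exercises the compile-time check behind "Batch update is only supported for idempotent updates": an assignment whose right-hand side depends on data the same statement modifies (for example `value = value + 1`) is not safe to re-apply if part of a batch is retried, while a constant assignment is. The snippet below is a toy version of such a check, with hypothetical names and no relation to the actual KQP rules.

```python
# Toy idempotency check for batch-update assignments: reject any assignment
# whose expression references a column that the same statement also updates.

import re

def is_idempotent(assignments: dict[str, str]) -> bool:
    updated = set(assignments)
    for expr in assignments.values():
        referenced = set(re.findall(r"[A-Za-z_][A-Za-z_0-9]*", expr))
        if referenced & updated:
            return False      # e.g. "value = value + 1" is not retry-safe
    return True

assert is_idempotent({"value": "5"})              # constant: accepted
assert not is_idempotent({"value": "value + 1"})  # self-referencing: rejected
```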
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.205564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.209610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:23:38.217037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.250703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.285285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.301817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.580531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824762604281520:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.580558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.580677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824762604281530:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.580682Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.636586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.656924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.670834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.682087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.707762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.727287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.744685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.762644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.785482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824762604282391:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.785503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.785658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824762604282396:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.785667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824762604282397:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.785672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.786509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok ... :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:44.361633Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:44.361642Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:44.361695Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:44.375767Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31177 TClient is connected to server localhost:31177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:23:44.524388Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:44.528172Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:44.543673Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.585371Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:44.636181Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.659273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:45.019554Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824791343055351:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.019585Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.027098Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824791343055369:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.027202Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.030711Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.052917Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.083642Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.100166Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.109254Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.124156Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.142841Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.159009Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.216094Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564824791343056222:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.216123Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.216305Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824791343056227:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.216313Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824791343056228:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.216372Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.217193Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:45.224215Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:23:45.224585Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564824791343056231:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:23:45.270390Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:45.321074Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564824791343056292:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpRbo::Aggregation [GOOD] >> KqpRbo::AliasesRenames |54.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 12465, MsgBus: 18305 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00147a/r3tmp/tmp3Hm19R/pdisk_1.dat 2025-10-24T16:23:42.603734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:42.603781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:42.604892Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824779447581746:2081] 1761323022520021 != 1761323022520024 TServer::EnableGrpc on GrpcPort 12465, node 1 2025-10-24T16:23:42.608732Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:42.618068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:42.618077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:42.618078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:42.618107Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18305 2025-10-24T16:23:42.644871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:42.644905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:42.650424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18305 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:42.808477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:42.832390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:42.914439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:43.052351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:43.368081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:43.457542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:43.738540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:44.878249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824788037518006:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:44.878310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:44.878423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824788037518016:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:44.878427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.058203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.076504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.086786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.118982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.152452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.183685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.212438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.242976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.302596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824792332486176:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.302627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.302774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824792332486181:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.302788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824792332486182:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.302811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.304745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:45.323705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:23:45.323788Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824792332486185:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:23:45.407173Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824792332486237:3569] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:45.757533Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824792332486544:2530], status: GENERIC_ERROR, issues:
:5:32: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:45.758662Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=OWM0NDdkMDMtZGMxNWNjN2ItNWYwMDU4YTctZWIyYjIxMzU=, ActorId: [1:7564824792332486535:2524], ActorState: ExecuteState, TraceId: 01k8bgh1a3agessqef19qcwf5g, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:45.764324Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824792332486548:2532], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:45.771940Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=OWM0NDdkMDMtZGMxNWNjN2ItNWYwMDU4YTctZWIyYjIxMzU=, ActorId: [1:7564824792332486535:2524], ActorState: ExecuteState, TraceId: 01k8bgh1b1fqkq6jq1jjshb7za, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:45.779200Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824792332486552:2534], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:45.779321Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=OWM0NDdkMDMtZGMxNWNjN2ItNWYwMDU4YTctZWIyYjIxMzU=, ActorId: [1:7564824792332486535:2524], ActorState: ExecuteState, TraceId: 01k8bgh1bg0k5zyq7njp303tnk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:45.784544Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824792332486556:2536], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:45.785829Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=OWM0NDdkMDMtZGMxNWNjN2ItNWYwMDU4YTctZWIyYjIxMzU=, ActorId: [1:7564824792332486535:2524], ActorState: ExecuteState, TraceId: 01k8bgh1bnfyvd2f2q3q3347mt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:23:45.800728Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824792332486560:2538], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-10-24T16:23:45.801689Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=OWM0NDdkMDMtZGMxNWNjN2ItNWYwMDU4YTctZWIyYjIxMzU=, ActorId: [1:7564824792332486535:2524], ActorState: ExecuteState, TraceId: 01k8bgh1c1dn26vmdc99rpae1p, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpRbo::Select [GOOD] |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpRbo::Filter [GOOD] >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> ObjectStorageListingTest::FilterListing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Select [GOOD] Test command err: Trying to start YDB, gRPC: 16578, MsgBus: 22690 2025-10-24T16:23:45.676811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824793337187686:2173];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:45.676867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00186c/r3tmp/tmpfThast/pdisk_1.dat 2025-10-24T16:23:45.767422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:45.767532Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16578, node 1 2025-10-24T16:23:45.784042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.784078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.788233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.808109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:45.808125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:45.808127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:45.808177Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22690 TClient is connected to server localhost:22690 WaitRootIsUp 'Root'... 
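The KqpBatchUpdate::MultiStatement run above logs the same compile rejection five times, "BATCH can't be used with multiple writes or reads", and the test still finishes [GOOD], so these rejections appear to be the behaviour the test asserts: a BATCH statement apparently cannot share the query text with any other read or write. The actual queries the test submits are not part of this log; the YQL below is only a sketch with a hypothetical table.

    -- presumably rejected: BATCH combined with a second read in the same query text
    BATCH UPDATE `/Root/SomeTable` SET Value = 0 WHERE Key > 100;
    SELECT COUNT(*) FROM `/Root/SomeTable`;

    -- presumably accepted: the BATCH statement is submitted as the only statement
    BATCH UPDATE `/Root/SomeTable` SET Value = 0 WHERE Key > 100;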
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:45.928777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:45.932378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:46.678347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:46.700944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797632155511:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.700971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.701211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797632155523:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.703432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:46.709972Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824797632155525:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:46.796170Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824797632155587:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Filter [GOOD] Test command err: Trying to start YDB, gRPC: 12012, MsgBus: 17461 2025-10-24T16:23:45.745907Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824792687512878:2083];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:45.748277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00183f/r3tmp/tmpLwVmry/pdisk_1.dat 2025-10-24T16:23:45.826183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:45.830241Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:45.830557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824792687512824:2081] 1761323025744203 != 1761323025744206 TServer::EnableGrpc on GrpcPort 12012, node 1 2025-10-24T16:23:45.853303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.853352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.853950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.868085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:45.868099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:45.868102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:45.868150Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17461 TClient is connected to server localhost:17461 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:46.062668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:46.093948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:46.730224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824796982480795:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.730249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.730365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824796982480805:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.730369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.748991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:47.024942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.102648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824801277448212:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.102678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.102775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824801277448217:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.102782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824801277448218:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.102842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.103643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:47.106278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:23:47.106381Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824801277448221:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:23:47.194236Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824801277448272:2405] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TSchemeShardSecretTest::DropSecret >> IndexBuildTestReboots::UniqueIndexValidationFailsBetweenShards [GOOD] >> KqpRbo::AliasesRenames [GOOD] >> KqpBatchUpdate::Large_1 [GOOD] >> Viewer::JsonStorageListingV2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:213:2077] 2025-10-24T16:23:40.904354Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:40.904703Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:40.904808Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:40.905107Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:40.905192Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:40.905242Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:40.905248Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:40.905291Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:40.906363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:40.906389Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:40.906417Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:40.906435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:40.906448Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:40.906458Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2025-10-24T16:23:40.917598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:40.917654Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:40.941511Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:40.941578Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:40.941595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:40.941608Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:40.941639Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:40.941646Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:40.941652Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:40.941658Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:40.952089Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:40.952150Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:40.964128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:40.964189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:40.966604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:40.966612Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:40.966657Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:40.966670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:41.009281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-10-24T16:23:41.009721Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-10-24T16:23:41.009813Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] 
recipient: [11:205:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:205:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:205:2077] 2025-10-24T16:23:42.751246Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:42.751438Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:42.751512Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:42.751931Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:42.751983Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:42.752035Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:42.752042Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:42.752089Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:42.760305Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:42.760353Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:42.760385Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:42.760402Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:42.760416Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:42.760425Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2025-10-24T16:23:42.771043Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:42.771097Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:42.795876Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:42.795935Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:42.795952Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:42.795966Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 
2025-10-24T16:23:42.795997Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:42.796004Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:42.796010Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:42.796020Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:42.811274Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:42.811329Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:42.824261Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:42.824311Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:42.825474Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:42.825482Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:42.825523Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:42.825530Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:42.825715Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-10-24T16:23:42.826231Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# ... 
ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-10-24T16:23:44.881233Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-10-24T16:23:44.881298Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-10-24T16:23:44.881620Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-10-24T16:23:44.881686Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-10-24T16:23:44.881752Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-10-24T16:23:44.881820Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-10-24T16:23:44.881890Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-10-24T16:23:44.882019Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-10-24T16:23:44.882092Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-10-24T16:23:44.882162Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-10-24T16:23:44.882228Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-10-24T16:23:44.882300Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-10-24T16:23:44.883144Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-10-24T16:23:44.883248Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:203:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:203:2077] Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:227:2066] recipient: [31:203:2077] 2025-10-24T16:23:46.661016Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:46.661204Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:46.661277Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:46.661453Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-10-24T16:23:46.661533Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:46.661584Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:46.661590Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:46.661636Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:46.662905Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:46.662941Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:46.662973Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:46.662991Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:46.663007Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:46.663017Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:247:2066] recipient: [31:20:2067] 2025-10-24T16:23:46.673644Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:46.673696Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:46.695727Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:46.695789Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:46.695810Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:46.695826Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:46.695877Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:46.695890Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:46.695898Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:46.695910Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:46.706774Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:46.706827Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:46.717137Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:46.717179Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:46.717411Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:46.717418Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:46.717458Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:46.717466Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:46.717632Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-10-24T16:23:46.717823Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-10-24T16:23:46.717908Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-10-24T16:23:46.717991Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-10-24T16:23:46.718072Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-10-24T16:23:46.718167Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-10-24T16:23:46.718247Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-10-24T16:23:46.718330Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-10-24T16:23:46.718412Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-10-24T16:23:46.718496Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-10-24T16:23:46.718585Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-10-24T16:23:46.718673Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-10-24T16:23:46.718774Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-10-24T16:23:46.718864Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-10-24T16:23:46.718953Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-10-24T16:23:46.719041Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-10-24T16:23:46.719132Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-10-24T16:23:46.719226Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-10-24T16:23:46.719322Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-10-24T16:23:46.719413Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> KqpRbo::CrossInnerJoin [GOOD] >> KqpRbo::Bench_Select >> Viewer::JsonStorageListingV2GroupIdFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 32356, MsgBus: 61247 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014cf/r3tmp/tmpuOFgYB/pdisk_1.dat 2025-10-24T16:23:39.708606Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824764550136225:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:39.708669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:39.713099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:39.767958Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32356, node 1 2025-10-24T16:23:39.804273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:39.804286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:39.804288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:39.804334Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:39.817876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:39.817913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:39.820374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-10-24T16:23:39.867905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61247 TClient is connected to server localhost:61247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:39.972756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:39.975764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:39.981582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:40.028180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:40.084067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:40.108044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:40.306501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824768845104913:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.306533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.306730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824768845104923:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.306736Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.359681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.379866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.397088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.416737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.459747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.480478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.503146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.524219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.551460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824768845105783:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.551491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.551599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824768845105788:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.551609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824768845105789:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.551659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.552485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:1 ... 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:48.719591Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:48.728278Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:48.740013Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:48.772959Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:48.844516Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:23:48.859562Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.246102Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824807673815629:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.246173Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.272613Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.284248Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824807673815751:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.284280Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.285257Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824807673815755:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.285805Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.287242Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.307200Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.321376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.335871Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.356010Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.372807Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.399877Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.457060Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824807673816503:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.457090Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.457217Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824807673816508:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.457222Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824807673816509:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.457239Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.458258Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:49.461788Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:49.462175Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:23:49.462223Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564824807673816512:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:23:49.544492Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564824807673816573:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:49.996669Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... >> TSchemeShardSecretTest::DefaultDescribeSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::AliasesRenames [GOOD] Test command err: Trying to start YDB, gRPC: 12684, MsgBus: 11319 2025-10-24T16:23:45.624413Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824793529145615:2147];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:45.624521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00183c/r3tmp/tmplkdNMs/pdisk_1.dat 2025-10-24T16:23:45.661840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:45.674120Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12684, node 1 2025-10-24T16:23:45.690142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:45.690153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:45.690155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:45.690191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11319 2025-10-24T16:23:45.733798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.733832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.738553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11319 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:23:45.816905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:45.820247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:45.828514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:46.368582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797824113454:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.368623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.369080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797824113464:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.369121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.484366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:46.514496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:46.549454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797824113642:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.549490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.550623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797824113647:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.550660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824797824113648:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.550673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.552604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:46.558390Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824797824113651:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-10-24T16:23:46.632053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:46.646852Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824797824113711:2457] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 27768, MsgBus: 16876 2025-10-24T16:23:47.009213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:47.009387Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824796672849722:2252];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:47.009398Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00183c/r3tmp/tmpCpj46y/pdisk_1.dat 2025-10-24T16:23:47.072732Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:47.072761Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:47.080284Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:47.082902Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:47.083031Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564824796672849505:2081] 1761323027004179 != 1761323027004182 TServer::EnableGrpc on GrpcPort 27768, node 2 2025-10-24T16:23:47.085647Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:47.103371Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:47.103381Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:47.103383Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:47.103426Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16876 TClient is connected to server localhost:16876 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ... have access permissions } 2025-10-24T16:23:47.682487Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824800967817738:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.682501Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824800967817739:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.682507Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.683487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:47.686269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:23:47.686355Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824800967817742:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:23:47.736891Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824800967817793:2503] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:48.016204Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 26275, MsgBus: 6806 2025-10-24T16:23:48.198601Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:48.198633Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00183c/r3tmp/tmpYhcS0d/pdisk_1.dat 2025-10-24T16:23:48.324203Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:48.328653Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:48.328673Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:48.334817Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26275, node 3 2025-10-24T16:23:48.339317Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:48.339327Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:48.339329Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:48.339371Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:48.411927Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6806 TClient is connected to server localhost:6806 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:48.760645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:48.768882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:49.164476Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:49.314243Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824809038278809:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.314271Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.320168Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824809038278819:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.320191Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.332546Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.355868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.372737Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:49.490195Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824809038279074:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.490223Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.490388Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824809038279079:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.490395Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824809038279080:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.490399Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:49.491529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:49.495314Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-10-24T16:23:49.495392Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564824809038279083:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:23:49.588928Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564824809038279134:2512] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TSchemeShardSecretTest::CreateSecret >> Cdc::KeysOnlyLog[YdsRunner] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TSchemeShardSecretTest::DropSecret [GOOD] >> TSchemeShardSecretTest::DropNotASecret >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingObject >> TSchemeShardSecretTest::DropNotASecret [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes >> KqpRbo::Bench_Select [GOOD] >> TSchemeShardSecretTest::DefaultDescribeSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-10-24T16:23:48.603581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:49.036013Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:49.036120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:49.036137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0016c6/r3tmp/tmpHyFR8B/pdisk_1.dat 2025-10-24T16:23:50.049461Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:50.052750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:50.053083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:50.053182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323026546134 != 1761323026546137 2025-10-24T16:23:50.088419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:50.200537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:50.262210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:50.367656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:50.418546Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:23:50.419246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:23:50.572814Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:23:50.572858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:23:50.573039Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:23:50.573048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:23:50.573054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:23:50.573106Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:23:50.573127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:23:50.573139Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:23:50.583451Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:23:50.689982Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:23:50.690057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:23:50.690389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:23:50.690396Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:50.690402Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:23:50.690408Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:50.690569Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:23:50.690591Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:23:50.690609Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:50.690616Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:50.690624Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:23:50.690629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:50.690640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:23:50.690732Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:50.690794Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:23:50.690810Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:23:50.692955Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:50.703413Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:50.703445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:23:50.861957Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:23:50.863809Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:50.863824Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:50.863876Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:50.863883Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:50.863892Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:50.863951Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:23:50.863979Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:50.866260Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:50.866278Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:23:50.867797Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:23:50.880237Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:50.880614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:23:50.880623Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:50.880665Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:23:50.880675Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:50.881192Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:50.881200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:50.881205Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:50.881220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:50.881229Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:50.881238Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:50.896268Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:50.896979Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:50.897067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:50.897073Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:50.915035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:50.915058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:50.915067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:50.915521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:50.915538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:50.922962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:50.931440Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:50.981002Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:51.094878Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:51.095310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:51.235323Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:827:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:52.259444Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgh6c24vbmxrp0ny0spkdm, Database: , SessionId: ydb://session/3?node_id=1&id=NzdiYTk2NjUtZGYyMDk0ODEtNTdmNDNmMGMtMjAwNjhiM2I=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:52.292757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:858:2677], serverId# [1:859:2678], sessionId# [0:0:0] 2025-10-24T16:23:52.292926Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:23:52.292973Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-10-24T16:23:52.304260Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:52.334175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:866:2684], serverId# [1:867:2685], sessionId# [0:0:0] 2025-10-24T16:23:52.334231Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-10-24T16:23:52.334273Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-10-24T16:23:52.334302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:866:2684], serverId# [1:867:2685], sessionId# [0:0:0] >> TSchemeShardSecretTest::CreateNotInDatabase >> TSchemeShardSecretTest::EmptySecretName >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:52.288934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:52.289274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-10-24T16:23:52.289281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:52.289294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:52.289300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:52.289304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:52.289617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:52.289898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:52.290923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:52.308062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:52.765581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:52.765604Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:52.819437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:52.819474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:52.819519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:52.831408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:52.831877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:52.832552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:52.832623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:52.838581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:52.838621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:52.839152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:52.839162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:52.839184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-10-24T16:23:52.839191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:52.839197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:52.839219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:52.842535Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:52.967137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:52.967209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:52.967265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:52.967271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:52.970184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:52.970200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:52.971450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:52.971488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:52.971538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:52.971546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:52.971551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:52.971556Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:52.972188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:52.972198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:52.972204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:52.977039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:52.977049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:52.977055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:52.977062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:52.978298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:52.984176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:52.984213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:52.984392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:52.984417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:52.984424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:52.984482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:52.984488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:52.984515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:52.984525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:52.985526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:52.985534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... chemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:54.311051Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.311055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-10-24T16:23:54.311059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:23:54.311088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.311093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:23:54.311102Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:54.311105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:54.311110Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:54.311113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:54.311117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:54.311121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:54.311125Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:54.311128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:54.311140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:54.311145Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:54.311148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:54.311152Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:54.311241Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:54.311250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:54.311254Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:54.311258Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:54.311262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:54.311352Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:54.311362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:54.311365Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:54.311369Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:54.311372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:54.311379Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:54.311738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:54.311912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:54.311951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:54.311957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:54.312000Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:54.312014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:54.312019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:311:2300] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-10-24T16:23:54.312572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "dir" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:54.312591Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 102:0, path: /MyRoot/dir 2025-10-24T16:23:54.312607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp:128, at schemeshard: 72057594046678944 2025-10-24T16:23:54.316099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp:128" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:54.316136Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp:128, operation: DROP SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:23:54.316180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:54.316185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:54.316232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:54.316249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:54.316253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:318:2307] TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:54.316301Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at 
schemeshard: 72057594046678944 2025-10-24T16:23:54.316318Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 19us result status StatusSuccess 2025-10-24T16:23:54.316387Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretInSubdomain >> TSchemeShardSecretTest::CreateNotInDatabase [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret >> TSchemeShardSecretTest::EmptySecretName [GOOD] >> Cdc::UuidExchange[PqRunner] >> TSchemeShardSecretTest::DropUnexistingSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:22.617211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:22.617235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:22.617240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:22.617246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:22.617252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:22.617256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:22.617264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:22.617275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:22.617368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:22.617419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:22.719653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:22.719672Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:22.745500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:22.745542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:22.746619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:22.757783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:22.758478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:22.758833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.758931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:22.762516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.762573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:22.763599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.763610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:22.763632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:22.763640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-10-24T16:23:22.763645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:22.763667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.766431Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:22.865590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:22.865645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.865699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:22.865705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:22.865766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:22.865780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:22.872310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.872364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:22.872421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.872432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:22.872437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:22.872442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:22.876240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.876253Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:22.876258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:22.879172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.879184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:22.879194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.879201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:22.882803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:22.884309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:22.885306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:22.885485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:22.885515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:22.885522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.885575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:22.885583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:22.885609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:22.885619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing 
txId 1 2025-10-24T16:23:22.892224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:22.892238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_b ... d: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:47.384304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:47.384308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:23:47.384313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:23:47.384317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-10-24T16:23:47.384704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:47.384716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:23:47.384733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:47.384738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:47.384744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:47.384747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:47.384752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-10-24T16:23:47.384759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:47.384764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:23:47.384768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:23:47.384809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:47.384816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-10-24T16:23:47.384822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-10-24T16:23:47.384825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-10-24T16:23:47.384983Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:47.384996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:47.385001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:47.385006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:23:47.385010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:47.385082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:47.385087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:47.385097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:23:47.385179Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2025-10-24T16:23:47.385225Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2025-10-24T16:23:47.385239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:47.385248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:47.385253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:47.385257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-10-24T16:23:47.385262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] 
was 4 2025-10-24T16:23:47.385273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-10-24T16:23:47.385306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-10-24T16:23:47.385723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-10-24T16:23:47.396084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:47.396128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:23:47.404380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:47.404440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-10-24T16:23:47.404457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:23:47.404598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:47.404608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:23:47.404708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:47.404743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:47.404751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:795:2694] TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:48.012412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:48.012561Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 182us result status StatusSuccess 2025-10-24T16:23:48.013547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_Select [GOOD] Test command err: Trying to start YDB, gRPC: 12702, MsgBus: 8375 2025-10-24T16:23:45.634133Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824791617202005:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:45.634153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001852/r3tmp/tmphY0ZG3/pdisk_1.dat 2025-10-24T16:23:45.724156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:45.741824Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:45.747281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.747306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12702, node 1 2025-10-24T16:23:45.751393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.775473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:45.775487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:45.775489Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:45.775532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8375 TClient is connected to server localhost:8375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:23:45.910650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:45.914311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:46.013989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:46.639776Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:46.912622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824795912169946:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.912652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.912822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824795912169956:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.912829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.657074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.901276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:48.030307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824804502104733:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.030331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.030438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824804502104738:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.030444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824804502104739:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.030503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.034469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:48.054595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824804502104742:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:23:48.132383Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824804502104793:2461] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 16133, MsgBus: 27144 2025-10-24T16:23:52.966099Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:52.972108Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:53.106531Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001852/r3tmp/tmppiCZLF/pdisk_1.dat 2025-10-24T16:23:53.252531Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:53.269679Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:53.269707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:53.272639Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16133, node 2 2025-10-24T16:23:53.316055Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:53.316069Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:53.316071Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:53.316119Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27144 2025-10-24T16:23:53.648306Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27144 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:53.696954Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:53.703871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:53.940418Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:53.941592Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824827742532452:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:53.941612Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:53.941735Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824827742532464:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:53.942934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:53.948307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:23:53.948460Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824827742532466:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:54.049071Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824832037499833:2341] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TSchemeShardSecretTest::CreateSecretOverExistingObject [GOOD] >> Cdc::KeysOnlyLog[PqRunner] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2025-10-24T16:23:45.906665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.982790Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.982888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.982898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001583/r3tmp/tmpdeB4k8/pdisk_1.dat 2025-10-24T16:23:46.281740Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:46.283311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:46.283417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:46.283535Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323025314817 != 1761323025314820 2025-10-24T16:23:46.318690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:46.375612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:46.409832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:46.508255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:46.752996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.753039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.753052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.753409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:765:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.753429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:46.754485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:46.804313Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:46.919132Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:47.072764Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:836:2667] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:52.684032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret >> TSchemeShardSecretTest::CreateSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] >> TSchemeShardSecretTest::CreateSecretInSubdomain [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions >> KqpBatchDelete::Large_2 [GOOD] >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncAlterSameSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:54.016648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:54.016670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:54.016676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:54.016687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:54.016693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:54.016697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:54.017016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:54.017029Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:54.018273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:54.019417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:54.298934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:54.298958Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:54.336477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:54.336517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:54.337680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:54.349582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:54.350049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:54.350744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.350833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:54.357586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.357633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:54.358139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:54.358150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.358177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:54.358186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:54.358193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:54.358220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.361894Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:54.485552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:54.485636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.485700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:54.485709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:54.485785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:54.485800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:54.486541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.486585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:54.486643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.486652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:54.486658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:54.486664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:54.487079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.487090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:54.487096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:54.487420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.487430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:23:54.487437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:54.487444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:54.488201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:54.489688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:54.489726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:54.490211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.490238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:54.490245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:54.490306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:54.490313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:54.490344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:54.490355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:54.491310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:54.491320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
D DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-10-24T16:23:54.998707Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:54.998722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:54.998728Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:54.998736Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-10-24T16:23:54.998741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:54.998869Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:54.998882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:23:54.998888Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:23:54.998893Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:54.998898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:54.998908Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:23:54.999661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:23:54.999772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:23:54.999820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:54.999828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:54.999919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:54.999940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:54.999946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:336:2325] TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:55.000022Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.000052Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 39us result status StatusSuccess 2025-10-24T16:23:55.000133Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Version: 0 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-10-24T16:23:55.000961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/dir" OperationType: ESchemeOpCreateSecret CreateSecret { Name: "test-secret" Value: "test-value-new" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.001007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_secret.cpp:152: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0 2025-10-24T16:23:55.001015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_secret.cpp:160: [72057594046678944] TCreateSecret Propose, 
path: /MyRoot/dir/test-secret, opId: 103:0, secretDescription (without secret parts): Name: "test-secret" 2025-10-24T16:23:55.001039Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSecret, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:187, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSecret, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:187" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001520Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSecret, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:187, operation: CREATE SECRET, path: /MyRoot/dir/test-secret TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:23:55.001579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:55.001585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:23:55.001647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:55.001668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:344:2333] TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:55.001732Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001759Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 30us result status StatusSuccess 2025-10-24T16:23:55.001827Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-init" Version: 0 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:54.891806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:54.891833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:54.891839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:54.891865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:54.891872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:54.891876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:54.891891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:54.891906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:54.892020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:54.892079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:54.907274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:54.907296Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:54.913434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:54.913469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:54.913510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:54.917637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:54.917796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:54.917906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.917982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:54.919806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.919866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:54.920106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:54.920118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.920144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:54.920153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:54.920159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:54.920185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.923088Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:54.946743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:54.946811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.946879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:54.946887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:54.946956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:54.946970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:54.947553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.947597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:54.947652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.947663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:54.947669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:54.947675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:54.948534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.948545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:54.948549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:54.948820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.948828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.948832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-10-24T16:23:54.948837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:54.949329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:54.949675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:54.949710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:54.949901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.949925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:54.949932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:54.949988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:54.949996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:54.950023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:54.950034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:54.951959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:54.951971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
r: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2025-10-24T16:23:55.383711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2025-10-24T16:23:55.383716Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-10-24T16:23:55.383720Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 6 2025-10-24T16:23:55.383725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:23:55.383831Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-10-24T16:23:55.383840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-10-24T16:23:55.383844Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-10-24T16:23:55.383860Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-10-24T16:23:55.383863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:23:55.383871Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-10-24T16:23:55.384877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-10-24T16:23:55.384937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-10-24T16:23:55.386399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-10-24T16:23:55.386452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-10-24T16:23:55.386458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-10-24T16:23:55.386516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 
2025-10-24T16:23:55.386532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:23:55.386536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:391:2381] TestWaitNotification: OK eventTxId 105 2025-10-24T16:23:55.386621Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.386648Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 37us result status StatusSuccess 2025-10-24T16:23:55.386715Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.386771Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.386782Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir/secret" took 13us result status StatusSuccess 2025-10-24T16:23:55.386819Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir/secret" PathDescription { Self { Name: "secret" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000004 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.386873Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.386883Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir" took 12us result status StatusSuccess 2025-10-24T16:23:55.386933Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir" PathDescription { Self { Name: "subdir" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\004\032\005user2 \003(\001" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000004 
ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |54.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |54.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |54.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:54.947992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:54.948018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:54.948024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:54.948035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:54.948042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:54.948047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:54.948059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:54.948072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:54.948186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:54.948240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:54.965734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:54.965753Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:54.971336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:54.971377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:54.971446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:54.973133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:54.973314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:54.973443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:54.973530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:54.974560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.974602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:54.974853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:54.974868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:54.974899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:54.974908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:54.974914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:54.974941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:54.976436Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] 
recipient: [1:15:2062] 2025-10-24T16:23:55.000094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.000160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.000212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:55.000221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:55.000293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:55.000307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.001066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:55.001164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:55.001182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:55.001187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:55.001783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:55.001814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:55.002237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.002250Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.002256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.002262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.002950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.003543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.003597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.003793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.003822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.003830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.003911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.003921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.003950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.003965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.004443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.004453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
55.355958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-10-24T16:23:55.355962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:23:55.355966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 3 FAKE_COORDINATOR: Erasing txId 103 2025-10-24T16:23:55.356022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.356028Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:23:55.356038Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:55.356042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:55.356047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:55.356050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:55.356053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-10-24T16:23:55.356059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:55.356063Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:23:55.356069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:23:55.356081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:55.356086Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-10-24T16:23:55.356090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:23:55.356094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-10-24T16:23:55.356097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-10-24T16:23:55.356236Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:55.356250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:55.356254Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:55.356258Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:23:55.356263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:55.356390Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:55.356402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:55.356406Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:55.356410Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-10-24T16:23:55.356414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:55.356968Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:55.356981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:55.356987Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:55.356991Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:23:55.356995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:23:55.357007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-10-24T16:23:55.357063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:23:55.357069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:23:55.357075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:55.357888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:55.358853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:55.358877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:55.358931Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 104, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 105 2025-10-24T16:23:55.359005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-10-24T16:23:55.359011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 TestWaitNotification wait txId: 106 2025-10-24T16:23:55.359022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-10-24T16:23:55.359025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-10-24T16:23:55.359094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-10-24T16:23:55.359110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:23:55.359115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:365:2355] 2025-10-24T16:23:55.359130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-10-24T16:23:55.359143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:23:55.359146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:365:2355] TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 2025-10-24T16:23:55.359207Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.359228Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 29us result status StatusPathDoesNotExist 2025-10-24T16:23:55.359264Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/dir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/dir/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/dir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::WriteSessionCloseWaitsForWrites >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:55.014474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:55.014499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.014505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:55.014517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:55.014524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:55.014529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:55.014543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.014556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:55.014667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:55.014732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:55.031032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:55.031057Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.035605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:55.035643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:55.035698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:55.037101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:55.037257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:55.037380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.037460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:55.040491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.040538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:55.040766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.040777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.040802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:55.040810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:55.040817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:55.040844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.042225Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:55.064917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.064984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.065032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:55.065038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:55.065095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:55.065105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.065762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.065798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:55.065846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.065853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:55.065857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:55.065861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:55.066219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.066229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:55.066233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:55.066604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.066613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.066617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.066622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.067120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.067476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.067506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.067658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.067676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.067681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.067727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.067731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.067755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.067762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.068096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.068105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
edSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.594735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.594762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.594935Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.594956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.594962Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.595009Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.595016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.595039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.595049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.595375Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.595382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:55.595414Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.595419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:23:55.595468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.595474Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:23:55.595486Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:23:55.595490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.595496Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:23:55.595499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.595504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-10-24T16:23:55.595511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.595516Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:23:55.595520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:23:55.595530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:55.595537Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:23:55.595542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-10-24T16:23:55.595625Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:23:55.595636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:23:55.595641Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:23:55.595646Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:23:55.595651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.595661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:23:55.597517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:23:55.597598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.597668Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# 
[2:277:2266] Bootstrap 2025-10-24T16:23:55.597863Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:277:2266] Become StateWork (SchemeCache [2:282:2271]) 2025-10-24T16:23:55.597910Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:55.597937Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/test-secret" took 35us result status StatusPathDoesNotExist 2025-10-24T16:23:55.597976Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-10-24T16:23:55.598080Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:277:2266] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:23:55.598365Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2025-10-24T16:23:55.599064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "test-secret" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.599088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 101:0, path: /MyRoot/test-secret 2025-10-24T16:23:55.599103Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp:128, at schemeshard: 72057594046678944 2025-10-24T16:23:55.600816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp:128" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.600859Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp:128, operation: DROP SECRET, path: /MyRoot/test-secret TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:55.600910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:55.600916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:55.600962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:55.600977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:55.600982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:293:2282] TestWaitNotification: OK eventTxId 101 |54.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AlterNotASecret |54.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:55.288503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:55.288531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.288538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:55.288548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:55.288555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:55.288559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:55.288572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 
0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.288585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:55.288709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:55.288765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:55.317356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:55.317378Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.322486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:55.322517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:55.322570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:55.326227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:55.326385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:55.326480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.326535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:55.327342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.327372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:55.327551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.327559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.327580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:55.327586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:55.327592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:55.327610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.337377Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 
2025-10-24T16:23:55.548754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.548819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.548867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:55.548873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:55.548934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:55.548945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.551323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.551366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:55.551422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.551432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:55.551436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:55.551441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:55.551881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.551893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:55.551897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:55.552185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.552194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.552199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.552206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.552810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.553158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.553203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.553387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.553409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.553415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.553467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.553473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.553498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.553508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.553866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.553874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ntLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202372Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202383Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 13us result status StatusSuccess 2025-10-24T16:23:56.202421Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202469Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202480Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 13us result status StatusSuccess 2025-10-24T16:23:56.202531Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "dir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202574Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202583Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 10us result status StatusSuccess 2025-10-24T16:23:56.202622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 
\003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202665Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:56.202674Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 10us result status StatusSuccess 2025-10-24T16:23:56.202722Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "dir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:55.198728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:55.198754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.198760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:55.198773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:55.198779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:55.198784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:55.198797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.198811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:55.198957Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:55.199019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:55.213633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:55.213659Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.217467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:55.217504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:55.217560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:55.221246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:55.221385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:55.221478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.221544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:55.222443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.222487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:55.222705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.222716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.222743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:55.222751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:55.222757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:55.222781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.224253Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:55.246752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:23:55.246838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.246895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:55.246903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:55.246976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:55.246989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.250110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.250161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:55.250228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.250240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:55.250246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:55.250251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:55.253181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.253205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:55.253215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:55.256170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.256193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.256200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.256209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.257000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.258613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.258671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.258909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.258943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.258952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.259020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.259030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.259062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.259073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.259674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.259684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
BUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-10-24T16:23:56.440273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:56.440333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSecret, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp:125" TxId: 104 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:56.440350Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeSecret, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp:125, operation: ALTER SECRET, path: /MyRoot/dir/test-secret 2025-10-24T16:23:56.440577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-10-24T16:23:56.440595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-10-24T16:23:56.440644Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:56.440658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:56.440663Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_secret.cpp:44: [72057594046678944] TAlterSecret TPropose operationId# 103:0HandleReply TEvOperationPlan: step# 5000004 2025-10-24T16:23:56.440677Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 128 -> 240 2025-10-24T16:23:56.440695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 103 2025-10-24T16:23:56.441192Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:56.441203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:56.441225Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:56.441231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-10-24T16:23:56.441262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:56.441267Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:23:56.441276Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:56.441280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:56.441284Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:56.441287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:56.441291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-10-24T16:23:56.441295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:56.441299Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:23:56.441302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:23:56.441312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:56.441316Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-10-24T16:23:56.441319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-10-24T16:23:56.441395Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:56.441403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:56.441407Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:56.441411Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: 
AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-10-24T16:23:56.441415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:56.441425Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-10-24T16:23:56.441874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-10-24T16:23:56.441919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:56.441925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-10-24T16:23:56.441935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:23:56.441938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:23:56.441992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:56.442007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:56.442011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:358:2347] 2025-10-24T16:23:56.442028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:23:56.442036Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:23:56.442040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:358:2347] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-10-24T16:23:56.442092Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-10-24T16:23:56.442111Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 23us result status StatusSuccess 2025-10-24T16:23:56.442176Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" 
PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-new" Version: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSecretTest::AlterNotASecret [GOOD] >> BasicUsage::GetAllStartPartitionSessions >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:55.076447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:55.076623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.076631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:55.076646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:55.076653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:55.076658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:55.076877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-10-24T16:23:55.076892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:55.077302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:55.077722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:55.185122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:55.185151Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.191272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:55.191315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:55.191376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:55.198590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:55.198848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:55.199159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.199238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:55.200665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.200705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:55.201044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.201053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.201082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:55.201089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:55.201095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:55.201119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.203165Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:55.267433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.267508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.267570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:55.267576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:55.268735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:55.268754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.272162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.272211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:55.272273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.272284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:55.272290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:55.272295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:55.273210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.273222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:55.273226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:55.273784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.273795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-10-24T16:23:55.273801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.273807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.276204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.277376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.277824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.278135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.278162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.278171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.278237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.278245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.278277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.278289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.279049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.279059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-10-24T16:23:56.937806Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 3/3 2025-10-24T16:23:56.937809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-10-24T16:23:56.937812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2025-10-24T16:23:56.937815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-10-24T16:23:56.937819Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:56.937823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:56.937830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:23:56.937834Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:1 2025-10-24T16:23:56.937836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:1 2025-10-24T16:23:56.937840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:56.937844Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:2 2025-10-24T16:23:56.937847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:2 2025-10-24T16:23:56.937851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-10-24T16:23:56.937855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2025-10-24T16:23:56.937859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-10-24T16:23:56.937861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-10-24T16:23:56.937864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-10-24T16:23:56.937867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-10-24T16:23:56.938207Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938230Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:56.938235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-10-24T16:23:56.938240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:56.938344Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938357Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:56.938360Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-10-24T16:23:56.938363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:56.938463Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938476Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:56.938480Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-10-24T16:23:56.938483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:56.938670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 
Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:56.938688Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:56.938693Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-10-24T16:23:56.938697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:23:56.938708Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:56.939069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:56.939088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:56.939368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:56.939384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:56.939427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:56.939434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:56.939487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:56.939503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:56.939508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:322:2311] TestWaitNotification: OK eventTxId 101 2025-10-24T16:23:56.939578Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir1/dir2/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-10-24T16:23:56.939605Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir1/dir2/test-secret" took 35us result status StatusSuccess 2025-10-24T16:23:56.939676Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir1/dir2/test-secret" PathDescription { Self { Name: "test-secret" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value" Version: 0 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 20119, MsgBus: 9648 2025-10-24T16:23:38.615523Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824762202380198:2248];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:38.615533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001578/r3tmp/tmpTaSane/pdisk_1.dat 2025-10-24T16:23:38.672738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.672760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.675551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:38.677056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.687666Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20119, node 1 2025-10-24T16:23:38.691895Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824762202379987:2081] 1761323018611334 != 1761323018611337 2025-10-24T16:23:38.712071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.712084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.712086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2025-10-24T16:23:38.712134Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9648 TClient is connected to server localhost:9648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.824623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.832194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:38.848474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.864791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:38.896637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.936699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:38.964707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.080514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824766497348930:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.080537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.080768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824766497348940:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.080778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.140459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.149462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.166440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.175776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.203188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.224478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.244661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.269533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.320360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824766497349802:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.320387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.320552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824766497349807:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.320561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824766497349808:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.320619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... FIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17669 TClient is connected to server localhost:17669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:47.420245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:47.426377Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:47.524635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:47.560819Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:47.576751Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 
2025-10-24T16:23:47.588951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.603554Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:47.796309Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824800397941871:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.796329Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.796406Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824800397941881:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.796410Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:47.811725Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.839964Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.864572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.891357Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.907572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:47.942443Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:48.007255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:48.036838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:48.065854Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7564824804692910044:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.065879Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.066012Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824804692910049:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.066022Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824804692910050:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.066026Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:48.066774Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:48.071012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:23:48.071081Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564824804692910053:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:23:48.160797Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564824804692910105:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:48.298884Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:48.697690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... >> ObjectStorageListingTest::FilterListing [GOOD] >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:55.676605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:55.676869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.676876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:55.676887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:55.676895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:55.676899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:55.677149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:55.677164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:55.678044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-10-24T16:23:55.678863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:55.732394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:55.732418Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.745339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:55.745369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:55.745417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:55.750822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:55.751219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:55.751727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.751787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:23:55.753335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.753368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:55.753727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.753736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:55.753757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:55.753763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:55.753769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:55.753788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.756749Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:55.879719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:55.879796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.879867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:55.879875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:55.882647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:55.882662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.884659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.884700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:55.884756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.884764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:55.884770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:55.884776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:55.885421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.885431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:55.885436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:55.886257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.886265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:55.886271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.886277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:55.890053Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:55.893857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:55.894459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:55.894649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:55.894674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:55.894680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.894735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:55.894741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:55.894766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:55.894776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:55.901680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:55.901692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:58.129486Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:58.129490Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-10-24T16:23:58.129494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-10-24T16:23:58.129525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-10-24T16:23:58.129532Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-10-24T16:23:58.129542Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:58.129547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:58.129552Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#101:0 progress is 1/1 2025-10-24T16:23:58.129556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:58.129561Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-10-24T16:23:58.129566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-10-24T16:23:58.129570Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 101:0 2025-10-24T16:23:58.129574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 101:0 2025-10-24T16:23:58.129584Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:58.129588Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-10-24T16:23:58.129592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:23:58.129595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:23:58.129686Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:58.129695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:58.129701Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:58.129705Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:23:58.129708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:23:58.129780Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:58.129788Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:23:58.129792Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:23:58.129795Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:23:58.129799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:58.129805Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:23:58.130259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:23:58.130276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:23:58.130309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:23:58.130314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:23:58.130355Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:58.130367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:23:58.130372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:307:2296] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-10-24T16:23:58.130951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSecret AlterSecret { Name: "dir" Value: "" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:58.130978Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_secret.cpp:113: [72057594046678944] TAlterSecret Propose, path: /MyRoot/dir, opId: 102:0 2025-10-24T16:23:58.131002Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp:125, at schemeshard: 72057594046678944 2025-10-24T16:23:58.136396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp:125" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-10-24T16:23:58.136442Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp:125, operation: ALTER SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:23:58.136496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:23:58.136502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:23:58.136554Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:23:58.136568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:23:58.136572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:314:2303] TestWaitNotification: OK eventTxId 102 2025-10-24T16:23:58.136627Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-10-24T16:23:58.136648Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 26us result status StatusSuccess 2025-10-24T16:23:58.136729Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BasicUsage::BasicWriteSession |54.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |54.7%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] >> BasicUsage::WriteSessionWriteInHandlers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::UniqueIndexValidationFailsBetweenShards [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:34.584917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:34.584939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:34.584945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 
0.010000s 2025-10-24T16:20:34.584951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:34.584957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:34.584962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:34.584972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:34.584985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:34.585065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:34.585117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:34.600619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:34.600642Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:34.600720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:34.602825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:34.602887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:34.602913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:34.604745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:34.604841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:34.604916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.604973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:34.605660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:34.605690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:34.605847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:34.605854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:34.605874Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:34.605879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:34.605884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:34.605900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:34.606905Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:34.623245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:34.623314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.623355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:34.623361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:34.623403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:34.623436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:34.623984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.624025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:34.624062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.624069Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:34.624072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:34.624076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:34.624409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.624417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:34.624422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:34.624811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.624823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:34.624827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:34.624831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:34.625330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:34.625684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:34.625710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:34.625850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:34.625869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:23:50.147428Z node 429 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710761:0 progress is 1/1 2025-10-24T16:23:50.147431Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:23:50.147435Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-10-24T16:23:50.147445Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [429:139:2160] message: TxId: 281474976710761 2025-10-24T16:23:50.147451Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:23:50.147455Z node 429 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710761:0 2025-10-24T16:23:50.147458Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710761:0 2025-10-24T16:23:50.147469Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-10-24T16:23:50.148648Z node 429 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-10-24T16:23:50.148665Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:23:50.148675Z node 429 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2470: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 1003, txId# 281474976710761 2025-10-24T16:23:50.148692Z node 429 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2473: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 1003, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 3], IndexType: EIndexTypeGlobalUnique, IndexName: Index, IndexColumn: index_key_first_part, IndexColumn: index_key_second_part, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha), SubscribersCount: 1, CreateSender: [429:422:2393], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 3, Processed: UploadRows: 3 UploadBytes: 80 ReadRows: 6 ReadBytes: 144 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710761 2025-10-24T16:23:50.150400Z node 429 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejection_Unlocking 2025-10-24T16:23:50.150427Z node 429 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejection_Unlocking TBuildInfo{ IndexBuildId: 1003, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 3], IndexType: EIndexTypeGlobalUnique, IndexName: Index, IndexColumn: index_key_first_part, IndexColumn: index_key_second_part, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha), SubscribersCount: 1, CreateSender: [429:422:2393], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 3, Processed: UploadRows: 3 UploadBytes: 80 ReadRows: 6 ReadBytes: 144 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:23:50.150435Z node 429 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-10-24T16:23:50.151286Z node 429 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejected 2025-10-24T16:23:50.151315Z node 429 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 1003 Rejected TBuildInfo{ IndexBuildId: 1003, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 3], IndexType: EIndexTypeGlobalUnique, IndexName: Index, IndexColumn: index_key_first_part, IndexColumn: index_key_second_part, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha), SubscribersCount: 1, CreateSender: [429:422:2393], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 3, Processed: UploadRows: 3 UploadBytes: 80 ReadRows: 6 ReadBytes: 144 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:23:50.151320Z node 429 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 1003, subscribers count# 1 2025-10-24T16:23:50.151338Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: 
tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:23:50.151344Z node 429 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [429:423:2394] TestWaitNotification: OK eventTxId 1003 2025-10-24T16:23:50.151438Z node 429 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-10-24T16:23:50.151502Z node 429 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:103: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 1003 Issues { message: "Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha)" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/Table" index { name: "Index" index_columns: "index_key_first_part" index_columns: "index_key_second_part" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 1003 Issues { message: "Duplicate key found: (index_key_first_part=42, index_key_second_part=alpha)" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/Table" index { name: "Index" index_columns: "index_key_first_part" index_columns: "index_key_second_part" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-10-24T16:23:50.151580Z node 429 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:50.151623Z node 429 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 46us result status StatusSuccess 2025-10-24T16:23:50.151750Z node 429 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index_key_first_part" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index_key_second_part" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 
0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-10-24T16:23:55.168403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:55.343317Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:55.343404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:55.343415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00161a/r3tmp/tmpjdjATG/pdisk_1.dat 2025-10-24T16:23:55.968290Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.970973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:55.971012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:55.971137Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323029426535 != 1761323029426538 2025-10-24T16:23:56.008352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:56.117879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:56.186850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:56.280206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:56.445244Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:23:56.446115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:23:56.776083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:23:56.776130Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:23:56.776936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:23:56.776948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:23:56.776956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:23:56.777022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:23:56.777057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:23:56.777075Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:23:56.788302Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:23:56.892001Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:23:56.892095Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:23:56.892146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:23:56.892152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:56.892157Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:23:56.892162Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:56.892346Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:23:56.892371Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:23:56.892387Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:56.892394Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:56.892403Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:23:56.892408Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:56.892418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:23:56.892536Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:56.892604Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:23:56.892620Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:23:56.893201Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:56.904443Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:56.904488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:23:57.061861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:23:57.063661Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:57.063678Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:57.063730Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:57.063739Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:57.063750Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:57.063830Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:23:57.063876Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:57.066381Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:57.066399Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:23:57.082996Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:23:57.083146Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:57.083574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:23:57.083583Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:57.083636Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:23:57.083649Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:57.083956Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:57.083965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:57.083971Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:57.083989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:23:57.083999Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:57.084010Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:57.099406Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:57.107004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:23:57.113355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:57.113380Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:57.123528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:57.123577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:57.123590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:57.123783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:57.123801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:57.127343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:57.150735Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:57.196269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:57.304520Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:57.304961Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:23:57.521272Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:827:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:58.459321Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bghcdvfvr00qa846apetg8, Database: , SessionId: ydb://session/3?node_id=1&id=NWJhN2IyMWQtYWFiYjUyYmUtNGNjODhkZGItMjRkMDM4MTE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:23:58.465032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:858:2677], serverId# [1:859:2678], sessionId# [0:0:0] 2025-10-24T16:23:58.465248Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:23:58.465309Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-10-24T16:23:58.480168Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:58.482591Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:866:2684], serverId# [1:867:2685], sessionId# [0:0:0] 2025-10-24T16:23:58.482644Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-10-24T16:23:58.482701Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-10-24T16:23:58.482731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:866:2684], serverId# [1:867:2685], sessionId# [0:0:0] 2025-10-24T16:23:58.482940Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:872:2690], serverId# [1:873:2691], sessionId# [0:0:0] 2025-10-24T16:23:58.482957Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-10-24T16:23:58.482971Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-10-24T16:23:58.482987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:872:2690], serverId# [1:873:2691], sessionId# [0:0:0] >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] Test command err: 
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:23:57.612933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:23:57.612966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:57.612972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:23:57.612985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:23:57.612993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:23:57.612998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:23:57.613606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:23:57.613622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:23:57.614639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:23:57.615590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:23:57.825217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:23:57.825248Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:57.856345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:23:57.856385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:23:57.857662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:23:57.867093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:23:57.867548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:23:57.867950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:57.868291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-10-24T16:23:57.870678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:57.870713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:23:57.871205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:57.871214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:57.871235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:23:57.871241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:23:57.871248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:23:57.871270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:23:57.877960Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:23:58.079673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:23:58.079743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:58.079794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:23:58.079801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:23:58.079899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:23:58.079911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:58.095777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:58.095827Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:23:58.095905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:58.095916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:23:58.095921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:23:58.095926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:23:58.104565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:58.104590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:23:58.104598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:23:58.105957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:58.105971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:23:58.105977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:58.105985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:23:58.109360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:23:58.112056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:23:58.113196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:23:58.113399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:58.113432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:58.113440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:58.113501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:23:58.113511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:23:58.113541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:23:58.113552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:23:58.120079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:58.120091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ts: 0/1, is published: true 2025-10-24T16:23:59.657067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-10-24T16:23:59.657100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-10-24T16:23:59.657375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:59.657413Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:23:59.657430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 8589936753 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:23:59.657438Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_secret.cpp:66: [72057594046678944] TCreateSecret::TPropose, opId: 103:0HandleReply TEvOperationPlan: step# 5000003 2025-10-24T16:23:59.657468Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 128 -> 240 2025-10-24T16:23:59.657496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:23:59.657506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 103 2025-10-24T16:23:59.658514Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:23:59.658521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:23:59.658542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:23:59.658560Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:23:59.658564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:441:2399], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:23:59.658568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:441:2399], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-10-24T16:23:59.658621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:23:59.658626Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:23:59.658637Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:59.658641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:59.658646Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:23:59.658649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:59.658653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-10-24T16:23:59.658658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:23:59.658662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:23:59.658666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:23:59.658675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:23:59.658681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-10-24T16:23:59.658685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-10-24T16:23:59.658688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 
3], 3 2025-10-24T16:23:59.658758Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:59.658768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:59.658772Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:59.658777Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-10-24T16:23:59.658781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:23:59.658847Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:59.658856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:23:59.658859Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:23:59.658873Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:23:59.658877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:23:59.658884Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-10-24T16:23:59.659350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:23:59.659496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:23:59.659550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:23:59.659556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:23:59.659610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:23:59.659625Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:23:59.659629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:477:2432] TestWaitNotification: OK eventTxId 103 2025-10-24T16:23:59.659695Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-name" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:23:59.659718Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-name" took 31us result status StatusSuccess 2025-10-24T16:23:59.659778Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-name" PathDescription { Self { Name: "test-name" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-name" Version: 0 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> BasicUsage::RetryDiscoveryWithCancel >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> BasicUsage::FallbackToSingleDb >> ExternalBlobsMultipleChannels::Simple >> KqpRbo::Bench_CrossFilter >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] |54.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] >> BasicUsage::PropagateSessionClosed >> KqpRbo::OrderBy |54.7%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |54.7%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |54.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> ExternalBlobsMultipleChannels::SingleChannel |54.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |54.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> ExternalBlobsMultipleChannels::ChangeExternalCount |54.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRbo::LeftJoinToKqpOpJoin |54.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::WriteSessionNoAvailableDatabase |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpRbo::Bench_Filter >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> TPQCachingProxyTest::TestDeregister >> Cdc::KeysOnlyLogDebezium >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> KqpRbo::UnionAll |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> KqpRbo::PredicatePushdownLeftJoin >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> KqpRbo::LeftJoinToKqpOpJoin [GOOD] >> TPQCachingProxyTest::TestDeregister [GOOD] >> KqpRbo::OrderBy [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce |54.8%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQCachingProxyTest::MultipleSessions [GOOD] >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::DocApi[PqRunner] >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::OrderBy [GOOD] Test command err: Trying to start YDB, gRPC: 62870, MsgBus: 19498 2025-10-24T16:24:01.252922Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824859179431937:2073];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:01.252939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:01.260251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001822/r3tmp/tmpkSysKI/pdisk_1.dat 2025-10-24T16:24:01.286857Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62870, node 1 2025-10-24T16:24:01.404775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.404806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.405435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:01.415918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:01.592326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:01.592340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:01.592342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:01.592394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19498 TClient is connected to server localhost:19498 2025-10-24T16:24:02.252020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:02.414069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:02.442573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:03.848150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824867769367174:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.848241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.848385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824867769367184:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.848389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.848476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824867769367186:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.848483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.434935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.545968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.590080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824872064334664:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.590120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.590218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824872064334669:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.590220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824872064334670:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.590227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.591162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:04.600228Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824872064334673:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:24:04.686465Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824872064334724:2461] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-10-24T16:24:05.384206Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:24:05.417865Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-10-24T16:24:05.417901Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-10-24T16:24:05.417913Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:24:05.417924Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-10-24T16:24:05.423168Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:05.423199Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-10-24T16:24:05.423220Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-10-24T16:24:05.423227Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2025-10-24T16:24:05.423234Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-10-24T16:24:05.423246Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-10-24T16:24:05.423254Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 2 2025-10-24T16:24:05.423263Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2025-10-24T16:24:05.423268Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 >> ExternalBlobsMultipleChannels::Simple [GOOD] >> KqpRbo::Bench_CrossFilter [GOOD] >> KqpRbo::Bench_10Joins ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] Test command err: 2025-10-24T16:24:03.086862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:03.442673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:03.442747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:03.442756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014e8/r3tmp/tmppyl1ys/pdisk_1.dat 2025-10-24T16:24:04.343942Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:04.347338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:04.347647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:04.347767Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323041648437 != 1761323041648440 2025-10-24T16:24:04.388316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:04.474576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:04.523747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:04.615809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:05.030878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-10-24T16:24:05.150913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:05.329873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 101:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-10-24T16:24:05.186379Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:24:05.219082Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-10-24T16:24:05.219114Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-10-24T16:24:05.219127Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:24:05.219140Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-10-24T16:24:05.225881Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:05.225912Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-10-24T16:24:05.225921Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 1 2025-10-24T16:24:05.225943Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: session1 >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> BsControllerConfig::DeleteStoragePool [GOOD] >> KqpRbo::UnionAll [GOOD] >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> KqpBatchDelete::ManyPartitions_2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::LeftJoinToKqpOpJoin [GOOD] Test command err: Trying to start YDB, gRPC: 13039, MsgBus: 26444 2025-10-24T16:24:01.767214Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824859608028902:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:01.767234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001817/r3tmp/tmphIX3MX/pdisk_1.dat 2025-10-24T16:24:01.827905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:01.846654Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:01.847224Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824859608028878:2081] 1761323041767059 != 1761323041767062 TServer::EnableGrpc on GrpcPort 13039, node 1 2025-10-24T16:24:01.878737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:01.878749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:01.878751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:01.878789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2025-10-24T16:24:01.879132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.879153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.880818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26444 TClient is connected to server localhost:26444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:01.964051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:01.966556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:02.101180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:02.771364Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:03.835081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824868197964164:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.835119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.839929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824868197964174:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:03.839947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.183091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.311500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.358367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.409769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.462815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824872492931805:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.462836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.462950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824872492931810:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.462960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824872492931811:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.462964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.464079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:04.475372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824872492931814:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:24:04.559362Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824872492931865:2566] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-10-24T16:24:01.301393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:01.342292Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:01.342379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:01.342407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014e7/r3tmp/tmpUaQu0L/pdisk_1.dat 2025-10-24T16:24:01.423137Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:01.423937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.423972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.424135Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323040783429 != 1761323040783432 2025-10-24T16:24:01.455185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:01.504132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:01.537273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:01.623626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:01.832948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.832980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.832992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.833252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:765:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.833266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.834133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:01.884353Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:01.980315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:24:02.040184Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:836:2667] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 24389, MsgBus: 13687 2025-10-24T16:23:38.330801Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824761135875746:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:38.330870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014e6/r3tmp/tmpGRuF3b/pdisk_1.dat 2025-10-24T16:23:38.427776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.427808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.428819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:38.430426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:38.434885Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24389, node 1 2025-10-24T16:23:38.446889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.446901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.446903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.446941Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13687 TClient is connected to server localhost:13687 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.541002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.546391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:23:38.548389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.569861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.597904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.620853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.777800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761135877135:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.777824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.777917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761135877145:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.777926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.836187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.848214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.857506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.869941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.883572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.897542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.913246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.927118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.960641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824761135878013:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.960664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.960798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761135878018:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.960807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824761135878019:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.960812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.961684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runn ... 0383Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:05.820422Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18002 TClient is connected to server localhost:18002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:05.908538Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:05.914953Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:24:05.919574Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:05.934619Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:05.983924Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:05.994072Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:06.004693Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:06.172405Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824883007381160:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.172420Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.172553Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824883007381170:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.172560Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.191666Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.204953Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.219494Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.232860Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.247036Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.266493Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.279048Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.290512Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.319402Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[12:7564824883007382032:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.319422Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.319573Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824883007382038:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.319579Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.319586Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824883007382037:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.320434Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:06.324490Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564824883007382041:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:06.420896Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564824883007382093:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } waiting... 2025-10-24T16:24:06.750118Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.801384Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-10-24T16:24:00.219901Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1761323040219892 2025-10-24T16:24:00.416583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824854664225376:2215];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.416684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:00.422732Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824858785589275:2087];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.422761Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:00.428405Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001532/r3tmp/tmpYhv8Se/pdisk_1.dat 2025-10-24T16:24:00.432828Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:00.468329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:00.482121Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:00.484710Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26004, node 1 2025-10-24T16:24:00.515945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-10-24T16:24:00.515981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:00.517747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001532/r3tmp/yandex7apg9Z.tmp 2025-10-24T16:24:00.517756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001532/r3tmp/yandex7apg9Z.tmp 2025-10-24T16:24:00.518688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:00.521313Z INFO: TTestServer started on Port 6298 GrpcPort 26004 2025-10-24T16:24:00.529607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:00.529631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:6298 PQClient connected to localhost:26004 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:24:00.538752Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:24:00.539174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:00.542088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001532/r3tmp/yandex7apg9Z.tmp 2025-10-24T16:24:00.542318Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:00.555179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 
2025-10-24T16:24:00.726899Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:00.748434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:00.947918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824854664226123:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.947974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824854664226151:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824854664226152:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.949403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:00.948476Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858785589557:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948495Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858785589572:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948509Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948902Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858785589578:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.948912Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.954769Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824858785589579:2127] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:00.971948Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824854664226155:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-10-24T16:24:00.972082Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824858785589577:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-10-24T16:24:01.028406Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824863080556903:2134] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:01.076437Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824858959193546:2666] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:01.090462Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824858959193556:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please ch ... er readTimeStamp done, result 1761323047332 queuesize 0 startOffset 0 2025-10-24T16:24:07.339546Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' size 177 2025-10-24T16:24:07.339557Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:24:07.339716Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-10-24T16:24:07.339986Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-10-24T16:24:07.340562Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-10-24T16:24:07.340570Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-10-24T16:24:07.340575Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2025-10-24T16:24:07.343262Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler === AcksHandler has written a message, closing the session 2025-10-24T16:24:07.347262Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: try to update token 2025-10-24T16:24:07.347272Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2025-10-24T16:24:07.352010Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|925e001b-33155242-8cea14f5-578bda05_0 grpc read done: success: 1 data: write_request[data omitted] 2025-10-24T16:24:07.352335Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:24:07.352349Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:24:07.352376Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-10-24T16:24:07.352110Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-10-24T16:24:07.352792Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-10-24T16:24:07.353082Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:24:07.353091Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for 
topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:24:07.353110Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2011: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-10-24T16:24:07.353118Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:637: [72075186224037892][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-10-24T16:24:07.353132Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:07.353139Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:07.353142Z node 2 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:24:07.353171Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-10-24T16:24:07.353207Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-10-24T16:24:07.353214Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:07.353256Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1675: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000? size 169 WTime 1761323047353 2025-10-24T16:24:07.353275Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:24:07.353285Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-10-24T16:24:07.354444Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7564824884555393850:2390] 2025-10-24T16:24:07.354458Z node 2 :PERSQUEUE DEBUG: partition.cpp:2168: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-10-24T16:24:07.354470Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-10-24T16:24:07.354476Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-10-24T16:24:07.354484Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-10-24T16:24:07.354505Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:07.354507Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:07.354509Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:07.354516Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:24:07.354524Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-10-24T16:24:07.354611Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' size 169 2025-10-24T16:24:07.354714Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-10-24T16:24:07.355770Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-10-24T16:24:07.355803Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 1000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-10-24T16:24:07.355808Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-10-24T16:24:07.355812Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-10-24T16:24:07.355936Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-10-24T16:24:07.355965Z :INFO: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-10-24T16:24:07.355968Z :INFO: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session will now close 2025-10-24T16:24:07.355973Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: aborting 2025-10-24T16:24:07.356041Z :WARNING: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-10-24T16:24:07.356045Z :DEBUG: [/Root] TraceId [] SessionId [src_id|925e001b-33155242-8cea14f5-578bda05_0] MessageGroupId [src_id] Write session: destroy 2025-10-24T16:24:07.356448Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|925e001b-33155242-8cea14f5-578bda05_0 grpc read done: success: 0 data: 2025-10-24T16:24:07.356454Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|925e001b-33155242-8cea14f5-578bda05_0 grpc read failed 2025-10-24T16:24:07.356461Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|925e001b-33155242-8cea14f5-578bda05_0 grpc closed 2025-10-24T16:24:07.356465Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|925e001b-33155242-8cea14f5-578bda05_0 is DEAD 2025-10-24T16:24:07.356620Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:24:07.357020Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [1:7564824884728998384:2463] destroyed 2025-10-24T16:24:07.357033Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:24:07.357039Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:07.357041Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:07.357043Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:07.360046Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:07.360053Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:07.360054Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >> TPQCachingProxyTest::TestPublishAndForget ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:213:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:213:2077] 2025-10-24T16:23:40.473583Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:40.474005Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:40.474105Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:40.474707Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:40.474813Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:40.474875Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:40.474882Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:40.474927Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:40.476054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:40.476085Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:40.476117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:40.476133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:40.476145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:40.476154Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: 
[1:20:2067] 2025-10-24T16:23:40.488116Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:40.488172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:40.512112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:40.512169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:40.512187Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:40.512200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:40.512230Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:40.512237Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:40.512243Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:40.512438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:40.524102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:40.524155Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:40.536073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:40.536134Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:40.536375Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:40.536383Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:40.536423Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:40.536436Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:40.539045Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:205:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:205:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: 
[11:227:2066] recipient: [11:205:2077] 2025-10-24T16:23:42.261688Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:42.261877Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:42.261957Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:42.262172Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:42.262215Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:42.262266Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:42.262272Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:42.262316Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:42.263364Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:42.263395Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:42.263429Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:42.263444Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:42.263455Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:42.263464Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2025-10-24T16:23:42.274857Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:42.274929Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:42.296400Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:42.296461Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:42.296482Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:42.296498Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:42.296530Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 
2025-10-24T16:23:42.296539Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:42.296545Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:42.296553Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:42.307090Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:42.307148Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:42.317623Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:42.317681Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:42.317889Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:42.317908Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:42.317954Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:42.317963Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:42.318153Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3064:2106] recipient: [21:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3064:2106] recipient: [21:2964:2117] Leader for TabletID 72057594037932033 is [21:3066:2119] sender: [21:3067:2106] recipient: [21:2964:2117] 2025-10-24T16:23:45.618025Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:45.618376Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:45.618469Z n ... 
ev/disk3 2025-10-24T16:23:58.694252Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 96:1000 Path# /dev/disk1 2025-10-24T16:23:58.694257Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 96:1001 Path# /dev/disk2 2025-10-24T16:23:58.694261Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 96:1002 Path# /dev/disk3 2025-10-24T16:23:58.694265Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 97:1000 Path# /dev/disk1 2025-10-24T16:23:58.694269Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2 2025-10-24T16:23:58.694273Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2025-10-24T16:23:58.694277Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1 2025-10-24T16:23:58.694281Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2 2025-10-24T16:23:58.694285Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3 2025-10-24T16:23:58.694289Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1 2025-10-24T16:23:58.694294Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2 2025-10-24T16:23:58.694297Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3 2025-10-24T16:23:58.694302Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1 2025-10-24T16:23:58.694306Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2 2025-10-24T16:23:58.694310Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3 2025-10-24T16:23:58.694315Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1 2025-10-24T16:23:58.694319Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2 2025-10-24T16:23:58.694323Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3 2025-10-24T16:23:58.694327Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1 2025-10-24T16:23:58.694331Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2 2025-10-24T16:23:58.694336Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3 2025-10-24T16:23:58.694340Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1 2025-10-24T16:23:58.694344Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2 2025-10-24T16:23:58.694348Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3 2025-10-24T16:23:58.694352Z 
node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 104:1000 Path# /dev/disk1 2025-10-24T16:23:58.694356Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2 2025-10-24T16:23:58.694360Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-10-24T16:23:58.694364Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1 2025-10-24T16:23:58.694368Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2 2025-10-24T16:23:58.694372Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3 2025-10-24T16:23:58.694376Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1 2025-10-24T16:23:58.694387Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2 2025-10-24T16:23:58.694392Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3 2025-10-24T16:23:58.694396Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1 2025-10-24T16:23:58.694401Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2 2025-10-24T16:23:58.694406Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-10-24T16:23:58.694410Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1 2025-10-24T16:23:58.694414Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2 2025-10-24T16:23:58.694418Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-10-24T16:23:58.694422Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1 2025-10-24T16:23:58.694426Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2 2025-10-24T16:23:58.694430Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-10-24T16:23:58.694435Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1 2025-10-24T16:23:58.694439Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2 2025-10-24T16:23:58.694443Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-10-24T16:23:58.694447Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1 2025-10-24T16:23:58.694451Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2 2025-10-24T16:23:58.694455Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-10-24T16:23:58.694459Z node 71 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1 2025-10-24T16:23:58.694463Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2 2025-10-24T16:23:58.694467Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-10-24T16:23:58.694471Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1 2025-10-24T16:23:58.694475Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2 2025-10-24T16:23:58.694480Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3 2025-10-24T16:23:58.694484Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1 2025-10-24T16:23:58.694488Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2 2025-10-24T16:23:58.694492Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2025-10-24T16:23:58.694496Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1 2025-10-24T16:23:58.694501Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2 2025-10-24T16:23:58.694506Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-10-24T16:23:58.694509Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1 2025-10-24T16:23:58.694513Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2 2025-10-24T16:23:58.694517Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-10-24T16:23:58.694533Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1 2025-10-24T16:23:58.694537Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2 2025-10-24T16:23:58.694543Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-10-24T16:23:58.694548Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1 2025-10-24T16:23:58.694552Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2 2025-10-24T16:23:58.694557Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-10-24T16:23:58.694562Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1 2025-10-24T16:23:58.694567Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2 2025-10-24T16:23:58.694572Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-10-24T16:23:58.694577Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1 2025-10-24T16:23:58.694581Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2 2025-10-24T16:23:58.694586Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3 2025-10-24T16:23:58.708640Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-10-24T16:23:58.758907Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-10-24T16:23:58.790402Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::UnionAll [GOOD] Test command err: Trying to start YDB, gRPC: 30466, MsgBus: 65259 2025-10-24T16:24:04.775562Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824874175971622:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:04.775594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001810/r3tmp/tmpXcPlCM/pdisk_1.dat 2025-10-24T16:24:05.159528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:05.185932Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:05.205690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:05.205924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30466, node 1 2025-10-24T16:24:05.207720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:05.276256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:05.276271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:05.276272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:05.276313Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65259 TClient is connected to server 
localhost:65259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:05.392119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:05.395248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:05.436217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:05.768254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:06.013696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824882765906833:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.013722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.013884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824882765906843:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.013896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.322498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.459447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.471335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.488646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:06.587894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824882765907175:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.587922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.588088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824882765907181:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.588097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.588107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824882765907180:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:06.589027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:06.595144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824882765907184:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:24:06.658628Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824882765907235:2561] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-10-24T16:24:11.360455Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:24:11.540962Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-10-24T16:24:11.540994Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-10-24T16:24:11.541006Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:24:11.541016Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-10-24T16:24:11.558442Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:11.558471Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 2 2025-10-24T16:24:11.558488Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-10-24T16:24:11.558495Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-10-24T16:24:11.558508Z node 1 :PQ_READ_PROXY INFO: caching_service.cpp:297: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-10-24T16:24:11.558516Z node 1 :PQ_READ_PROXY ALERT: caching_service.cpp:159: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2025-10-24T16:24:11.558524Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-10-24T16:24:11.558539Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-10-24T16:24:06.223481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:06.373569Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:06.373643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:06.373653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014d9/r3tmp/tmpOo4vpT/pdisk_1.dat 2025-10-24T16:24:06.615699Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:06.616940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:06.616977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:06.617116Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323043178490 != 1761323043178493 2025-10-24T16:24:06.648574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:06.747051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:06.804913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:06.889931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:07.279742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:07.279771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:07.279782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:07.282818Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:765:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:07.282848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:07.287774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:07.332686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:07.437425Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:24:07.519334Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:836:2667] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> KqpRbo::Bench_10Joins [GOOD] >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter >> TPQCachingProxyTest::OutdatedSession >> KqpRbo::Bench_Filter [GOOD] >> KqpRbo::Bench_JoinFilter >> KqpBatchUpdate::Large_2 [GOOD] |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_10Joins [GOOD] Test command err: Trying to start YDB, gRPC: 63001, MsgBus: 29533 2025-10-24T16:24:00.840814Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824856586959731:2251];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.840831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00182d/r3tmp/tmplQclgG/pdisk_1.dat 2025-10-24T16:24:00.963955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:00.964437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:00.964469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:00.968508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:00.980910Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63001, node 1 2025-10-24T16:24:01.195840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:01.420794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:01.420805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:01.420807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:01.420851Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2025-10-24T16:24:01.807728Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29533 TClient is connected to server localhost:29533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:02.395340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:02.412629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:04.402647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824869471862068:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.415977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.416208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824873766829374:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.416215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.459680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:05.690948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:05.721619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824878061796841:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.721650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.721682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824878061796848:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.721682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824878061796846:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.721691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.722618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:05.725251Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824878061796850:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-10-24T16:24:05.809861Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824878061796901:2454] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:05.844190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824856586959731:2251];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:05.844222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14463, MsgBus: 27469 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00182d/r3tmp/tmpaEE1wd/pdisk_1.dat 2025-10-24T16:24:06.557847Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:06.557869Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:06.580586Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:06.580940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:06.580953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:06.582509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14463, node 2 2025-10-24T16:24:06.588426Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:06.588441Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:06.588443Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:06.588486Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27469 TClient is connected to server localhost:27469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: ... 
611887Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:07.619929Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:07.621446Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:7564824887333428018:2081] 1761323047507368 != 1761323047507371 2025-10-24T16:24:07.624297Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:07.624316Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:07.632509Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28970, node 3 2025-10-24T16:24:07.684038Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:07.684051Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:07.684053Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:07.684093Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19348 2025-10-24T16:24:07.872035Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:24:07.996508Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:07.998107Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:08.507904Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:10.629743Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824900218330597:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:10.629771Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:10.629912Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824900218330607:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:10.629921Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:10.693113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:10.764296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:10.807478Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:10.865900Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:10.940627Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:10.980106Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:11.023467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:11.055031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:11.107226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:11.154087Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:11.194538Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824904513298625:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.194568Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.194660Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824904513298631:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.194666Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824904513298630:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.194680Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.197123Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:11.202731Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564824904513298634:2399], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-10-24T16:24:11.271313Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564824904513298685:2818] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> KqpRbo::PredicatePushdownLeftJoin [GOOD] >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-10-24T16:24:02.326332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:02.904440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:02.905323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:02.905575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014dd/r3tmp/tmpp0rH3C/pdisk_1.dat 2025-10-24T16:24:03.754358Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:03.766323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:03.766930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:03.767071Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323041497991 != 1761323041497994 2025-10-24T16:24:03.804539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:03.986132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:04.088913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:04.208216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:04.897939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2617], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.897971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.897981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.898700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:763:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.898710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:04.906998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:04.965492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:05.080072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2625], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:24:05.206115Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:834:2666] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-10-24T16:24:13.276039Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:24:13.370755Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-10-24T16:24:13.370789Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-10-24T16:24:13.370802Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:24:13.370812Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-10-24T16:24:13.377647Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:13.377686Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-10-24T16:24:13.377707Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-10-24T16:24:13.377714Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-10-24T16:24:13.377732Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 13333, MsgBus: 7625 2025-10-24T16:23:41.105940Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824776269653818:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:41.105960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014c9/r3tmp/tmpLlxLnK/pdisk_1.dat 2025-10-24T16:23:41.202255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:41.215882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:41.215926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:41.216875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-10-24T16:23:41.218867Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:41.219049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824776269653781:2081] 1761323021105615 != 1761323021105618 TServer::EnableGrpc on GrpcPort 13333, node 1 2025-10-24T16:23:41.240156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:41.240169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:41.240170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:41.240218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7625 TClient is connected to server localhost:7625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:41.346064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:41.355787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:41.412209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:41.436457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:41.486121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.525525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.543713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:41.712488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824776269655431:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.712514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.712739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824776269655441:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.712747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.823118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.834281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.852035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.865392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.874219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.896472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.927183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.944163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.969968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824776269656305:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.970005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.970127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824776269656310:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.970134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824776269656311:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.970144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... tus: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:01.975912Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:01.988753Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:01.999660Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:24:02.029167Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.044866Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:02.095381Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:02.355056Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824864520959526:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.355143Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.358733Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.365434Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.367190Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824864520959688:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.367212Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.367505Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824864520959697:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.367523Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.389461Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.406398Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.421903Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.437789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.452428Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.466525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:02.484140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824864520960395:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.484179Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.484268Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824864520960405:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.484277Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824864520960406:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.484291Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.484948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:02.488807Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564824864520960409:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:02.588328Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564824864520960461:3558] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:02.901441Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:02.940825Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> TPQCachingProxyTest::OutdatedSession [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::PredicatePushdownLeftJoin [GOOD] Test command err: Trying to start YDB, gRPC: 17970, MsgBus: 3986 2025-10-24T16:24:06.225073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824881779883926:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:06.225100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00180e/r3tmp/tmpW6qTX5/pdisk_1.dat 2025-10-24T16:24:06.549822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:06.557129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:06.557156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:06.561552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17970, node 1 2025-10-24T16:24:06.574188Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:06.595897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:06.595908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:06.595910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:06.595953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3986 TClient is connected to server localhost:3986 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:24:06.715643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:06.894036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:07.261589Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:11.228016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824881779883926:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:11.229560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:24:13.377055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824911844655580:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.377307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.377441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824911844655590:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.377445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.532547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:13.716405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:13.817735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824911844655768:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.817763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.817901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824911844655773:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.817908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824911844655774:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.817913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:13.826076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:13.846927Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824911844655777:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:24:13.946967Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824911844655828:2473] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |54.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |54.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::CloseWriteSessionImmediately [GOOD] |54.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> KqpBatchDelete::ManyPartitions_3 [GOOD] >> JsonChangeRecord::Heartbeat [GOOD] >> JsonChangeRecord::DataChangeVersion [GOOD] >> KqpRbo::Bench_JoinFilter [GOOD] |54.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |54.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2025-10-24T16:23:59.129007Z :BasicWriteSession INFO: Random seed for debugging is 1761323039128993 2025-10-24T16:23:59.803956Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824853698531893:2262];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:59.803979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:59.820092Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00153a/r3tmp/tmph0lVjV/pdisk_1.dat 2025-10-24T16:23:59.825813Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824852473339723:2204];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:59.825838Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:59.832966Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:59.861157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:59.871451Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:59.902594Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8727, node 1 2025-10-24T16:23:59.909161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:59.909187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:59.912437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:59.913214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00153a/r3tmp/yandexG3mmHG.tmp 2025-10-24T16:23:59.913218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00153a/r3tmp/yandexG3mmHG.tmp 2025-10-24T16:23:59.913292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00153a/r3tmp/yandexG3mmHG.tmp 2025-10-24T16:23:59.913338Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:59.925338Z INFO: TTestServer started on Port 17534 GrpcPort 8727 2025-10-24T16:23:59.944447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:59.944474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:59.945842Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:59.946232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17534 PQClient connected to localhost:8727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:23:59.969070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:59.976905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:24:00.087665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:24:00.091262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:24:00.133575Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:00.280685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824857993499948:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.280739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.280883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824857993499985:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.280889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824857993499986:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.280894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.281709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:00.286992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824857993500020:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.287025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.288566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824857993500025:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.288636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.293605Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824857993499989:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-10-24T16:24:00.349051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:00.355756Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824856768307210:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:00.356046Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=NGZkZGZkZjItMjY0ODM5YTgtZjdmMDNjZWUtNTA0MTYwYzA=, ActorId: [2:7564824856768307160:2293], ActorState: ExecuteState, TraceId: 01k8bghfh25he50tz03wf31d51, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:00.356587Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:00.356766Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824857993500090:2677] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.meta ... TableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-10-24T16:24:15.140449Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-10-24T16:24:15.141220Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-10-24T16:24:15.188062Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:67: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-10-24T16:24:15.188773Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:80: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-10-24T16:24:15.188784Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-10-24T16:24:15.188555Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824920090039805:2481] connected; active server actors: 1 2025-10-24T16:24:15.192787Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824920090039805:2481] disconnected. 
2025-10-24T16:24:15.192802Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824920090039805:2481] disconnected; active server actors: 1 2025-10-24T16:24:15.192806Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1695: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824920090039805:2481] disconnected no session 2025-10-24T16:24:15.223975Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.223994Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.223997Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.229553Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-10-24T16:24:15.229570Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-10-24T16:24:15.229574Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7564824920090039775:2481] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-10-24T16:24:15.229584Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-10-24T16:24:15.230792Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-10-24T16:24:15.230807Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72075186224037892] server connected, pipe [3:7564824920090039828:2481], now have 1 active actors on pipe 2025-10-24T16:24:15.230861Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:24:15.230868Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:24:15.230898Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|f2b7f0b4-bc541033-1928fffa-65539cf5_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-10-24T16:24:15.230907Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.230922Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.230924Z node 3 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:24:15.230935Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.230947Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:35: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-10-24T16:24:15.230962Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-10-24T16:24:15.231065Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:24:15.231068Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:24:15.231085Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-10-24T16:24:15.231116Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|f2b7f0b4-bc541033-1928fffa-65539cf5_0 2025-10-24T16:24:15.233901Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1761323055233 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:15.233939Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|f2b7f0b4-bc541033-1928fffa-65539cf5_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-10-24T16:24:15.236344Z :INFO: [] MessageGroupId [src] SessionId [src|f2b7f0b4-bc541033-1928fffa-65539cf5_0] Write session: close. Timeout = 0 ms 2025-10-24T16:24:15.236359Z :INFO: [] MessageGroupId [src] SessionId [src|f2b7f0b4-bc541033-1928fffa-65539cf5_0] Write session will now close 2025-10-24T16:24:15.236369Z :DEBUG: [] MessageGroupId [src] SessionId [src|f2b7f0b4-bc541033-1928fffa-65539cf5_0] Write session: aborting 2025-10-24T16:24:15.236541Z :INFO: [] MessageGroupId [src] SessionId [src|f2b7f0b4-bc541033-1928fffa-65539cf5_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:24:15.236548Z :DEBUG: [] MessageGroupId [src] SessionId [src|f2b7f0b4-bc541033-1928fffa-65539cf5_0] Write session: destroy 2025-10-24T16:24:15.237131Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|f2b7f0b4-bc541033-1928fffa-65539cf5_0 grpc read done: success: 0 data: 2025-10-24T16:24:15.237143Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|f2b7f0b4-bc541033-1928fffa-65539cf5_0 grpc read failed 2025-10-24T16:24:15.237152Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|f2b7f0b4-bc541033-1928fffa-65539cf5_0 grpc closed 2025-10-24T16:24:15.237156Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|f2b7f0b4-bc541033-1928fffa-65539cf5_0 is DEAD 2025-10-24T16:24:15.237356Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:24:15.237642Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [3:7564824920090039828:2481] destroyed 2025-10-24T16:24:15.237661Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:24:15.237668Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.237673Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.237675Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist Session was created 2025-10-24T16:24:15.323369Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.323386Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.323389Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.424021Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.424040Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.424044Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.532007Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.532029Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.532033Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.639927Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.639948Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.639951Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.739975Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.739994Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.739997Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:15.840640Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:15.840659Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:15.840661Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-10-24T16:24:14.864041Z node 1 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-10-24T16:24:14.965179Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-10-24T16:24:14.965206Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-10-24T16:24:14.965216Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:24:14.965224Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-10-24T16:24:14.982374Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:14.982398Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-10-24T16:24:14.982414Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-10-24T16:24:14.982420Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-10-24T16:24:14.982434Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:293: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts |54.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 6216, MsgBus: 24079 2025-10-24T16:23:39.606905Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824767972211389:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:39.606921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:39.617954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014d4/r3tmp/tmpZBsrrb/pdisk_1.dat 2025-10-24T16:23:39.656035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:39.656062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:39.656951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:39.671772Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:39.673652Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824767972211352:2081] 1761323019606539 != 1761323019606542 
TServer::EnableGrpc on GrpcPort 6216, node 1 2025-10-24T16:23:39.700298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:39.700309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:39.700311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:39.700349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24079 2025-10-24T16:23:39.765514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24079 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:39.864398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:39.867602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:39.875611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:39.913227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:23:39.953037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.018340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:40.184896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824772267180299:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.184929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.186709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824772267180309:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.186725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.266115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.299003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.326838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.342941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.356466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.380385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.404805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.427516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.460469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824772267181171:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.460490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.460608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824772267181176:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.460616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824772267181177:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:40.460668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, stat ... 4227Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:14.374264Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16602 TClient is connected to server localhost:16602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:14.500356Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:14.502497Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:14.510477Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:14.531660Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:14.560961Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:14.577227Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:14.608188Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:14.752222Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915225768450:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.752243Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.752328Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915225768460:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.752341Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.761778Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.768930Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.781244Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.795462Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.812011Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.823345Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.835248Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.851614Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.867888Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[12:7564824915225769323:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.867916Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.868063Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915225769327:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.868074Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.868082Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915225769329:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.868923Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:14.876503Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564824915225769332:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:14.931301Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564824915225769384:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:15.234346Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:15.395146Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/rbo/unittest >> KqpRbo::Bench_JoinFilter [GOOD] Test command err: Trying to start YDB, gRPC: 22334, MsgBus: 23049 2025-10-24T16:24:02.676289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824866063956706:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:02.676304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:02.762827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001814/r3tmp/tmpNrTLdX/pdisk_1.dat 2025-10-24T16:24:03.244125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:03.419489Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22334, node 1 2025-10-24T16:24:03.471697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:03.548744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:03.548768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-10-24T16:24:03.549073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:03.721887Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:05.068253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:05.069903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:05.070411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:05.089508Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23049 TClient is connected to server localhost:23049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:07.130275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:07.682421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824866063956706:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:07.682453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:24:11.102234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824900423695745:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.102277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.112086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824904718663054:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.112107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:11.895451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:12.136319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824909013630451:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:12.136348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:12.136496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824909013630456:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:12.136502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824909013630457:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:12.136507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:12.140517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:12.188666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:24:12.189499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824909013630460:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:24:12.288977Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824909013630511:2422] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 17686, MsgBus: 19661 2025-10-24T16:24:14.484302Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824915795745062:2062];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:14.484316Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:14.509449Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001814/r3tmp/tmpqyOIUK/pdisk_1.dat 2025-10-24T16:24:14.592371Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:14.594942Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564824915795745040:2081] 1761323054483776 != 1761323054483779 2025-10-24T16:24:14.614438Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:14.614464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17686, node 2 2025-10-24T16:24:14.619386Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:14.620427Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:14.620436Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:14.620438Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:14.620483Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:14.755908Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19661 TClient is connected to server localhost:19661 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:15.032356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:15.044130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:15.482486Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824920090713021:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.482547Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.484996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:15.486589Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824920090713050:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.486687Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.495466Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:15.537261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:15.601263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824920090713195:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.601290Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.601399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824920090713200:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.601403Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824920090713201:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.601463Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.602284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:15.607539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-10-24T16:24:15.607634Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824920090713204:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:24:15.665414Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824920090713255:2443] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 62926, MsgBus: 10898 2025-10-24T16:23:38.344077Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824760106776694:2083];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:38.346315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:38.357738Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014ed/r3tmp/tmpaH9UOR/pdisk_1.dat 2025-10-24T16:23:38.419155Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62926, node 1 2025-10-24T16:23:38.449728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:38.449739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:38.449750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:38.449787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:38.452092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:38.452121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:38.456162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10898 2025-10-24T16:23:38.480041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10898 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:38.547186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:38.550975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:38.560687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.583269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.658236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.674060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:38.831357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824760106778302:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.831381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.832076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824760106778312:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.832105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:38.890631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.906832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.918370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.934516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.950222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.961091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.976203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:38.991467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:39.009217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824764401746470:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.009237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.009249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764401746475:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.009257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824764401746477:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.009260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:39.009905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:1 ... : Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:14.167932Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:14.175761Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:14.183910Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:14.196287Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:14.218841Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:14.231674Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:14.753214Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915427327142:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.753238Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.758827Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.766624Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915427327259:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.766948Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.767296Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915427327264:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.767318Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.778277Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.807268Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.822799Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.857064Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.868502Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.903214Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.909336Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:14.921770Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:14.960655Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915427328024:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.960678Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.960832Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915427328029:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.960841Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564824915427328030:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.960907Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:14.961887Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:14.965772Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564824915427328033:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:15.038408Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564824919722295381:3551] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:15.521793Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight |55.0%| [TA] $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps |55.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |55.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... 
results_accumulator.log} |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TGroupMapperTest::MonteCarlo |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.1%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |55.1%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[action-dep_world_quote_code-default.txt-Results] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TGroupMapperTest::Block42_1disk |55.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |55.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |55.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> BsControllerConfig::MoveGroups [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2963:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2963:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2963:2117] 2025-10-24T16:23:39.073709Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:39.074021Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:39.074117Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:39.074435Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:39.074745Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:39.074813Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.074819Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.074882Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:39.075932Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:39.075966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:39.075997Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:39.076018Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.076031Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.076040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: 
[1:60:2107] 2025-10-24T16:23:39.086589Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:39.086648Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.110815Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.110876Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.110894Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.110908Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.110937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.110945Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.110951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.110961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.121294Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.121360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.132278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.132346Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:39.132549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:39.132555Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:39.132600Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:39.132614Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:39.135335Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } 
Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-10-24T16:23:39.135770Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-10-24T16:23:39.135782Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-10-24T16:23:39.135787Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-10-24T16:23:39.135791Z node 1 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-10-24T16:23:39.135796Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-10-24T16:23:39.135964Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-10-24T16:23:39.135970Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-10-24T16:23:39.135974Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-10-24T16:23:39.135978Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-10-24T16:23:39.135982Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-10-24T16:23:39.135987Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-10-24T16:23:39.135992Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-10-24T16:23:39.135996Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-10-24T16:23:39.136001Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-10-24T16:23:39.136005Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-10-24T16:23:39.136010Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-10-24T16:23:39.136014Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-10-24T16:23:39.136019Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-10-24T16:23:39.136024Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-10-24T16:23:39.136028Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-10-24T16:23:39.136033Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 
8:1000 Path# /dev/disk1 2025-10-24T16:24:10.757660Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-10-24T16:24:10.757666Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-10-24T16:24:10.757671Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-10-24T16:24:10.757675Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-10-24T16:24:10.757679Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-10-24T16:24:10.757683Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-10-24T16:24:10.757688Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-10-24T16:24:10.757692Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-10-24T16:24:10.757696Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-10-24T16:24:10.757700Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-10-24T16:24:10.757704Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-10-24T16:24:10.757709Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-10-24T16:24:10.757713Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-10-24T16:24:10.757717Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-10-24T16:24:10.757721Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-10-24T16:24:10.757726Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-10-24T16:24:10.757730Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-10-24T16:24:10.757734Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-10-24T16:24:10.757738Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-10-24T16:24:10.757742Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-10-24T16:24:10.757746Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-10-24T16:24:10.757751Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-10-24T16:24:10.757756Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-10-24T16:24:10.757760Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
186:1000 Path# /dev/disk1 2025-10-24T16:24:10.757765Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-10-24T16:24:10.757769Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-10-24T16:24:10.757773Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-10-24T16:24:10.757779Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-10-24T16:24:10.757783Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-10-24T16:24:10.757788Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-10-24T16:24:10.757793Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-10-24T16:24:10.757797Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-10-24T16:24:10.757801Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-10-24T16:24:10.757806Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-10-24T16:24:10.757810Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-10-24T16:24:10.757814Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-10-24T16:24:10.757819Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-10-24T16:24:10.757823Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-10-24T16:24:10.757827Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-10-24T16:24:10.757831Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-10-24T16:24:10.757835Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-10-24T16:24:10.757839Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-10-24T16:24:10.757843Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-10-24T16:24:10.757847Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-10-24T16:24:10.757851Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2025-10-24T16:24:10.757856Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-10-24T16:24:10.757860Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-10-24T16:24:10.757864Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
194:1000 Path# /dev/disk1 2025-10-24T16:24:10.757868Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-10-24T16:24:10.757873Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-10-24T16:24:10.757878Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-10-24T16:24:10.757882Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-10-24T16:24:10.757887Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-10-24T16:24:10.757891Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-10-24T16:24:10.757895Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-10-24T16:24:10.757899Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-10-24T16:24:10.757904Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-10-24T16:24:10.757908Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-10-24T16:24:10.757912Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-10-24T16:24:10.757918Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-10-24T16:24:10.757922Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-10-24T16:24:10.757926Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-10-24T16:24:10.757930Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-10-24T16:24:10.757934Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-10-24T16:24:10.757939Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-10-24T16:24:10.757944Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-10-24T16:24:10.757948Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-10-24T16:24:10.757952Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-10-24T16:24:11.124534Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2215} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.367646s 2025-10-24T16:24:11.124933Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1019} StateWork event processing took too much time Type# 2146435078 Duration# 0.368064s 2025-10-24T16:24:11.180059Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-10-24T16:24:11.262307Z node 151 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-10-24T16:24:11.301275Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-10-24T16:24:11.376922Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-10-24T16:24:11.393656Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-10-24T16:24:11.442657Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-10-24T16:24:11.469625Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> test.py::test[optimizers-drop_unessential_map-default.txt-Results] >> test.py::test[insert-select_with_sort_limit-default.txt-Results] >> TGroupMapperTest::NonUniformCluster >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> TGroupMapperTest::MapperSequentialCalls >> TGroupMapperTest::NonUniformClusterMirror3dc |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TGroupMapperTest::SimplestMirror3dc [GOOD] >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> TGroupMapperTest::MapperSequentialCalls [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-10-24T16:23:06.648767Z :WriteRAW INFO: Random seed for debugging is 1761322986648760 2025-10-24T16:23:06.804074Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824623943852975:2261];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:06.804142Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:06.812812Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:06.813617Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0013c9/r3tmp/tmp3YKaBF/pdisk_1.dat 2025-10-24T16:23:06.857408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:06.857463Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:06.858026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:06.924207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:06.924235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:06.928423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:06.928447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:06.934692Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:06.938823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:06.944222Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:06.948156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26575, node 1 2025-10-24T16:23:07.009093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/0013c9/r3tmp/yandexiGbOA6.tmp 2025-10-24T16:23:07.009107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/0013c9/r3tmp/yandexiGbOA6.tmp 2025-10-24T16:23:07.015976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/0013c9/r3tmp/yandexiGbOA6.tmp 2025-10-24T16:23:07.016049Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:07.039976Z INFO: TTestServer started on Port 15526 GrpcPort 26575 2025-10-24T16:23:07.040716Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:07.080480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15526 PQClient connected to localhost:26575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:07.113068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:23:07.444030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628749150697:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.444062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628749150735:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.444070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.444961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:07.445239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628749150738:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.445249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.454282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824628238820399:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.454329Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824628238820367:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.454434Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.456153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824628749150771:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.456241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:07.461412Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824628238820407:2129] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.475964Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824628238820404:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:23:07.477134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824628749150737:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:23:07.548707Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824628238820443:2137] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.548544Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824628749150834:2659] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:07.596359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:07.597979Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824628749150848:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Erro ... redPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-10-24T16:24:21.641379Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [15:7564824946091577902:2459] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-10-24T16:24:21.641382Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [15:7564824946091577902:2459] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-10-24T16:24:21.641390Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-10-24T16:24:21.644134Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72075186224037892] server connected, pipe [15:7564824946091577959:2459], now have 1 active actors on pipe 2025-10-24T16:24:21.648008Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-10-24T16:24:21.652179Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:24:21.652196Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:24:21.652228Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|72f704f4-faf84440-ee59968d-c2d10970_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-10-24T16:24:21.652240Z node 16 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.652245Z node 16 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.652248Z node 16 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:24:21.652261Z node 16 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.652282Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:35: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-10-24T16:24:21.652298Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-10-24T16:24:21.662935Z node 16 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.662954Z node 16 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.662956Z node 16 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.663001Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:24:21.663011Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:24:21.663046Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-10-24T16:24:21.664081Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|72f704f4-faf84440-ee59968d-c2d10970_0 2025-10-24T16:24:21.664944Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1761323061664 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:21.664986Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|72f704f4-faf84440-ee59968d-c2d10970_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-10-24T16:24:21.665100Z :INFO: [] MessageGroupId [src] SessionId [src|72f704f4-faf84440-ee59968d-c2d10970_0] Write session: close. 
Timeout = 0 ms 2025-10-24T16:24:21.665105Z :INFO: [] MessageGroupId [src] SessionId [src|72f704f4-faf84440-ee59968d-c2d10970_0] Write session will now close 2025-10-24T16:24:21.665110Z :DEBUG: [] MessageGroupId [src] SessionId [src|72f704f4-faf84440-ee59968d-c2d10970_0] Write session: aborting 2025-10-24T16:24:21.665187Z :INFO: [] MessageGroupId [src] SessionId [src|72f704f4-faf84440-ee59968d-c2d10970_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:24:21.665191Z :DEBUG: [] MessageGroupId [src] SessionId [src|72f704f4-faf84440-ee59968d-c2d10970_0] Write session: destroy 2025-10-24T16:24:21.668087Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|72f704f4-faf84440-ee59968d-c2d10970_0 grpc read done: success: 0 data: 2025-10-24T16:24:21.668095Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|72f704f4-faf84440-ee59968d-c2d10970_0 grpc read failed 2025-10-24T16:24:21.668100Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|72f704f4-faf84440-ee59968d-c2d10970_0 grpc closed 2025-10-24T16:24:21.668105Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|72f704f4-faf84440-ee59968d-c2d10970_0 is DEAD 2025-10-24T16:24:21.668351Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:24:21.670810Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [15:7564824946091577959:2459] destroyed 2025-10-24T16:24:21.670830Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-10-24T16:24:21.670838Z node 16 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.670840Z node 16 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.670842Z node 16 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.727949Z :INFO: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Starting read session 2025-10-24T16:24:21.727964Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Starting cluster discovery 2025-10-24T16:24:21.728011Z :INFO: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8157: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8157
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:8157. " 2025-10-24T16:24:21.728016Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Restart cluster discovery in 0.008384s 2025-10-24T16:24:21.743929Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Starting cluster discovery 2025-10-24T16:24:21.744025Z :INFO: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8157: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8157
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:8157. " 2025-10-24T16:24:21.744031Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Restart cluster discovery in 0.018029s 2025-10-24T16:24:21.763951Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Starting cluster discovery 2025-10-24T16:24:21.764020Z :INFO: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8157: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8157
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:8157. " 2025-10-24T16:24:21.764024Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Restart cluster discovery in 0.024621s 2025-10-24T16:24:21.764524Z node 16 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.764541Z node 16 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.764544Z node 16 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.792112Z :DEBUG: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Starting cluster discovery 2025-10-24T16:24:21.792190Z :NOTICE: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8157: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8157
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:8157. " } 2025-10-24T16:24:21.795908Z :NOTICE: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8157: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8157
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:8157. " } 2025-10-24T16:24:21.795943Z :INFO: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:21.795950Z :NOTICE: [/Root] [/Root] [d3f088bc-c2553ee4-3de22b5c-6b35c5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:21.868028Z node 16 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.868050Z node 16 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.868053Z node 16 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.976157Z node 16 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.976175Z node 16 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.976178Z node 16 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >> TGroupMapperTest::DifferentGroupSizeInUnits [GOOD] >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SimplestMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-10-24T16:24:00.619501Z :FallbackToSingleDb INFO: Random seed for debugging is 1761323040619490 2025-10-24T16:24:00.976488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:00.976544Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824855251782274:2253];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.987950Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001518/r3tmp/tmpWSSafG/pdisk_1.dat 2025-10-24T16:24:00.996650Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:01.004219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:01.111590Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:01.111657Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: 
LookupError; 2025-10-24T16:24:01.115896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:01.129652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.129681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.130702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.130716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.136513Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:24:01.136562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:01.136853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:01.143334Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17580, node 1 2025-10-24T16:24:01.171553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001518/r3tmp/yandexeTokJL.tmp 2025-10-24T16:24:01.171568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001518/r3tmp/yandexeTokJL.tmp 2025-10-24T16:24:01.171637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001518/r3tmp/yandexeTokJL.tmp 2025-10-24T16:24:01.171694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:01.176628Z INFO: TTestServer started on Port 27345 GrpcPort 17580 TClient is connected to server localhost:27345 PQClient connected to localhost:17580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:24:01.204754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:24:01.291188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:01.366609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:01.475700Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858846817237:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.475725Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858846817261:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.475733Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.476973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:01.477499Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858846817293:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.477526Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.477638Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824858846817295:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.477659Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.479951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824859546750346:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.479967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824859546750357:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.479974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.480732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824859546750361:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.480812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.484172Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824859546750362:2605] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:01.485959Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824859546750360:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:24:01.486010Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824858846817266:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:24:01.534671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:01.577681Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824858846817339:2167] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeRes ... 1 2025-10-24T16:24:20.075114Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.176054Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.176071Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.176074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.276326Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.276340Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.276344Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.383917Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.383938Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.383942Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.485289Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.485310Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.485313Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.587661Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.587949Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.587952Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.688120Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.688139Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.688142Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.790998Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.791018Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.791021Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:20.892107Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.892123Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.892126Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2025-10-24T16:24:20.944762Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-10-24T16:24:20.944793Z :INFO: [/Root] [] [5e650845-454d810b-c50fe035-336f15d2] Open read subsessions to databases: { name: , endpoint: localhost:4962, path: /Root } 2025-10-24T16:24:20.944840Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Starting read session 2025-10-24T16:24:20.944844Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Starting single session 2025-10-24T16:24:20.947031Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-10-24T16:24:20.947037Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-10-24T16:24:20.947042Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] Reconnecting session to cluster in 0.000000s 2025-10-24T16:24:20.947094Z :ERROR: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:4962
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4962. 2025-10-24T16:24:20.947100Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-10-24T16:24:20.947102Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-10-24T16:24:20.947127Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:4962" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:4962
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4962. " } 2025-10-24T16:24:20.947963Z :NOTICE: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:20.947971Z :DEBUG: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:4962" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:4962
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4962. " } 2025-10-24T16:24:20.947988Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Closing read session. Close timeout: 0.010000s 2025-10-24T16:24:20.947995Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:24:20.948002Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:20.948006Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:20.948008Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:24:20.948011Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:20.948014Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:20.948016Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:24:20.948020Z :INFO: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:20.948023Z :NOTICE: [/Root] [/Root] [d5099722-a55d23b0-86169f16-6dc1887a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:20.992114Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:20.992134Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:20.992137Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.092556Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.092571Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.092574Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.195984Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.196002Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.196005Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.300030Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.300048Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.300051Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.400147Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:21.400163Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:21.400166Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:21.685310Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1006: ActorId: [3:7564824946948039345:2481] TxId: 281474976715679. Ctx: { TraceId: 01k8bgj4bw6y8fsjbnchgcst6h, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZTA0MTM3NGEtODEzYTdjZGItZTk5ODYxZmItZjEzNDY4Nzc=, PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-10-24T16:24:21.685532Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [3:7564824946948039354:2481], TxId: 281474976715679, task: 3. Ctx: { TraceId : 01k8bgj4bw6y8fsjbnchgcst6h. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZTA0MTM3NGEtODEzYTdjZGItZTk5ODYxZmItZjEzNDY4Nzc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7564824946948039345:2481], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> KqpBatchDelete::ManyPartitions_1 [GOOD] >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> TGroupMapperTest::MakeDisksForbidden [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::DifferentGroupSizeInUnits [GOOD] >> TGroupMapperTest::WithAttentionToRacksAndReplication [GOOD] >> Viewer::TabletMerging [FAIL] >> Viewer::StorageGroupOutputWithoutFilterNoDepends ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 64175, MsgBus: 2779 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001451/r3tmp/tmpWEmzFc/pdisk_1.dat 2025-10-24T16:23:43.364726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:43.704465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:43.704495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:43.712713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:43.794194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:43.796788Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824782139175150:2081] 1761323023322679 != 1761323023322682 2025-10-24T16:23:43.798293Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64175, node 1 2025-10-24T16:23:43.967385Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:43.967398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:43.967400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:43.967433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2779 2025-10-24T16:23:44.335941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2779 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:44.470315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:44.479971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:23:44.497839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.658962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.815673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.867757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:45.251448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790729111410:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.251483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.251681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790729111420:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.251689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.416758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.434195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.444288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.457768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.474743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.487696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.501096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.513501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.544779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824790729112282:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.544818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.544949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790729112287:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.544967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790729112288:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.544973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.546097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... got bad distributable configuration TClient is connected to server localhost:5856 TClient is connected to server localhost:5856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... 2025-10-24T16:24:22.804704Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:22.812289Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:22.820635Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:22.864468Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:22.884769Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:22.926586Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:24:22.960746Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.176338Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7564824956729949223:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.176365Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.176451Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7564824956729949233:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.176456Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.192203Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.208080Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.222374Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.231910Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.251656Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.266720Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.282562Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.295824Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:23.332042Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[20:7564824956729950095:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.332079Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.332284Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7564824956729950100:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.332294Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7564824956729950101:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.332298Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:23.333081Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:23.336961Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:24:23.337018Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7564824956729950104:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:23.432823Z node 20 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [20:7564824956729950156:3557] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:23.595999Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-10-24T16:24:23.965030Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::WithAttentionToRacksAndReplication [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Table [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> KikimrIcGateway::TestListPath >> KqpResultSetFormats::ArrowFormat_Simple >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> KikimrIcGateway::TestLoadExternalTable >> KqpResultSetFormats::DefaultFormat >> BasicUsage::ReadMirrored [GOOD] |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] |55.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |55.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> Cdc::HugeKey[TopicRunner] [GOOD] >> 
Cdc::HugeKeyDebezium >> KqpResultSetFormats::ArrowFormat_EmptyBatch >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> TExportToS3Tests::ShouldSucceedOnManyTables >> KikimrIcGateway::TestLoadTableMetadata >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] |55.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |55.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> KikimrIcGateway::TestCreateExternalTable >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::DisableAutoDropping ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-10-24T16:24:01.154756Z :PropagateSessionClosed INFO: Random seed for debugging is 1761323041154747 2025-10-24T16:24:01.483245Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824860367755752:2081];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:01.483565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:01.490073Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824861626785363:2087];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:01.490110Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:01.490483Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001527/r3tmp/tmpbsvlJk/pdisk_1.dat 2025-10-24T16:24:01.502112Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:01.542618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:01.542491Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:01.556442Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11922, node 1 2025-10-24T16:24:01.587342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.587380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.592374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:01.600107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001527/r3tmp/yandexQ05v6W.tmp 2025-10-24T16:24:01.600117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001527/r3tmp/yandexQ05v6W.tmp 2025-10-24T16:24:01.601984Z INFO: TTestServer started on Port 21749 GrpcPort 11922 2025-10-24T16:24:01.603230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:01.603248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:01.604508Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:24:01.604800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21749 PQClient connected to localhost:11922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:01.632806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:01.711135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001527/r3tmp/yandexQ05v6W.tmp 2025-10-24T16:24:01.711238Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:01.712183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-10-24T16:24:01.740611Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 
2025-10-24T16:24:01.805325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:02.093609Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824865921752962:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.093642Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824865921752950:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.093679Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:02.103081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:02.117865Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824865921752965:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:24:02.192561Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824865921753004:2137] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:02.489453Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:02.494646Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:02.926141Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824864662724062:2332], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:02.926839Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=M2NhYjRmM2ItMWQ0YzIxNWEtMjA0ZGM0N2MtMTg4YjExM2I=, ActorId: [1:7564824864662724016:2323], ActorState: ExecuteState, TraceId: 01k8bghhst71hzsz3wqnyzy7tn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:02.927289Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:02.930151Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824865921753010:2305], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:02.931044Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=MjZiNTllZWMtY2FlYjk4MGUtMzc1MDgxNTYtYmZiOGZkMDE=, ActorId: [2:7564824865921752948:2295], ActorState: ExecuteState, TraceId: 01k8bghh8yap24nm9hftmg89rc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:02.931179Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:02.952649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/ ... DEBUG: partition.cpp:2350: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.474614Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037896][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.512800Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.512817Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.512820Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.528442Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|9196f9cf-8b7272cb-db82b264-8da72303_0] Write session will now close 2025-10-24T16:24:25.528465Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|9196f9cf-8b7272cb-db82b264-8da72303_0] Write session: aborting 2025-10-24T16:24:25.528676Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|9196f9cf-8b7272cb-db82b264-8da72303_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-10-24T16:24:25.528698Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|9196f9cf-8b7272cb-db82b264-8da72303_0] Write session: destroy 2025-10-24T16:24:25.528739Z :INFO: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Closing read session. 
Close timeout: 18446744073709.551615s 2025-10-24T16:24:25.528756Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-10-24T16:24:25.528766Z :INFO: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 380 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:25.528865Z :INFO: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:25.528869Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-10-24T16:24:25.528873Z :INFO: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 380 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:25.528878Z :INFO: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:25.528881Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2025-10-24T16:24:25.528883Z :INFO: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 380 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:25.528909Z :NOTICE: [/Root] [/Root] [df98f869-e757af71-3d146510-1f5e04f8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:25.531909Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|9196f9cf-8b7272cb-db82b264-8da72303_0 grpc read done: success: 0 data: 2025-10-24T16:24:25.531929Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: src_id|9196f9cf-8b7272cb-db82b264-8da72303_0 grpc read failed 2025-10-24T16:24:25.531941Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: src_id|9196f9cf-8b7272cb-db82b264-8da72303_0 grpc closed 2025-10-24T16:24:25.531947Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: src_id|9196f9cf-8b7272cb-db82b264-8da72303_0 is DEAD 2025-10-24T16:24:25.531977Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_1753824531041053225_v1 grpc read done: success# 0, data# { } 2025-10-24T16:24:25.531983Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_1753824531041053225_v1 grpc read failed 2025-10-24T16:24:25.531990Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_1753824531041053225_v1 grpc closed 2025-10-24T16:24:25.532011Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_1753824531041053225_v1 is DEAD 2025-10-24T16:24:25.532296Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:24:25.532723Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.532740Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037894] server disconnected, pipe [3:7564824963055489966:2527] destroyed 2025-10-24T16:24:25.532749Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.532752Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037896] server disconnected, pipe [3:7564824963055489964:2526] destroyed 2025-10-24T16:24:25.532757Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037896] server disconnected, pipe [3:7564824963055490073:2532] destroyed 2025-10-24T16:24:25.532771Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:24:25.532780Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.532784Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.532786Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037896][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.532807Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.532810Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [3:7564824963055489962:2525] destroyed 2025-10-24T16:24:25.532829Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.532832Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.532834Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.532974Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7564824963055489949:2521] disconnected. 2025-10-24T16:24:25.532983Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7564824963055489949:2521] disconnected; active server actors: 1 2025-10-24T16:24:25.532987Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7564824963055489949:2521] client user disconnected session shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.533009Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824963055489950:2521] disconnected. 2025-10-24T16:24:25.533013Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824963055489950:2521] disconnected; active server actors: 1 2025-10-24T16:24:25.533016Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564824963055489950:2521] client user disconnected session shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.533026Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7564824963055489951:2521] disconnected. 
2025-10-24T16:24:25.533029Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7564824963055489951:2521] disconnected; active server actors: 1 2025-10-24T16:24:25.533031Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7564824963055489951:2521] client user disconnected session shared/user_3_1_1753824531041053225_v1 2025-10-24T16:24:25.548284Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.548296Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.548298Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.574942Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.574956Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.574958Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037896][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.615587Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.615604Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.615607Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.648770Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037894][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.648802Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037894][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.648805Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037894][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.675908Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.675928Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.675931Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037896][Partition][0][StateIdle] Try persist >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-10-24T16:23:56.487598Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1761323036487592 2025-10-24T16:23:58.206392Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824845961871413:2087];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:58.206422Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:58.263079Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00154a/r3tmp/tmpxLN8XY/pdisk_1.dat 2025-10-24T16:23:58.299451Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:58.696113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:58.735917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:58.735968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:58.746025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:58.746053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:58.748638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:58.748656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:58.757217Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:58.757249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:58.764333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:58.796575Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7996, node 1 2025-10-24T16:23:58.892461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:58.920336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00154a/r3tmp/yandex68ToiF.tmp 2025-10-24T16:23:58.920349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00154a/r3tmp/yandex68ToiF.tmp 2025-10-24T16:23:58.940070Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:58.956250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00154a/r3tmp/yandex68ToiF.tmp 2025-10-24T16:23:58.956319Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:58.960068Z INFO: TTestServer started on Port 13503 GrpcPort 7996 TClient is connected to server localhost:13503 PQClient connected to localhost:7996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:23:59.187964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:59.271178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:59.276178Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... waiting... waiting... waiting... 2025-10-24T16:23:59.960669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824853708816598:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.960701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.967118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:59.964730Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824850256839008:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.964767Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.967782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824853708816612:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.965023Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824850256839022:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.965029Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824850256839023:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.967804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824853708816613:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.965071Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.967812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.968904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824853708816651:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.968922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.969031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824853708816654:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.969036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:59.977255Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824853708816621:2631] txid# 281474976720661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:59.996880Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824853708816620:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-10-24T16:24:00.000025Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824850256839026:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-10-24T16:24: ... 10-24T16:24:23.351022Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:23.463912Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:23.463926Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:23.463928Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:23.562357Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:23.562372Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:23.562374Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:23.664104Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:23.664119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:23.664121Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:23.767993Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:23.768004Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:23.768007Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:23.868207Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:23.868219Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:23.868222Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:23.976108Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:23.976118Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:23.976120Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.080048Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.080064Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.080067Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 
2025-10-24T16:24:24.183934Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.183948Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.183951Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.288081Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.288094Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.288097Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.392060Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.392072Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.392074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.496262Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.496275Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.496277Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.599942Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.599957Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.599960Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.704030Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.704043Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.704046Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.804068Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.804085Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.804087Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:24.904348Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:24.904362Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:24.904365Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.005505Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.005518Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.005521Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.107946Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.107959Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.107962Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.208737Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.208752Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.208756Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.308862Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.308878Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.308880Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.409693Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.409712Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.409715Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.511165Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.511192Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.511195Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:25.611749Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:25.611768Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:25.611771Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: 
[72075186224037892][Partition][0][StateIdle] Try persist
2025-10-24T16:24:25.711801Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0
2025-10-24T16:24:25.711818Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1
2025-10-24T16:24:25.711820Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist
2025-10-24T16:24:25.812298Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0
2025-10-24T16:24:25.812317Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1
2025-10-24T16:24:25.812321Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist
2025-10-24T16:24:25.912538Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0
2025-10-24T16:24:25.912559Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1
2025-10-24T16:24:25.912562Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist
>> KqpResultSetFormats::ValueFormat_Simple
>> TExecutorDb::CoordinatorSimulation [GOOD]
>> TExecutorDb::RandomCoordinatorSimulation
>> KikimrIcGateway::TestDropExternalTable
>> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD]
>> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus
>> TExportToS3Tests::DisableAutoDropping [GOOD]
>> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets [GOOD]
>> KikimrProvider::TestFillAuthPropertiesBasic [GOOD]
>> KikimrProvider::TestFillAuthPropertiesAws [GOOD]
>> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD]
>> TExportToS3Tests::DecimalOutOfRange
>> Cdc::RacySplitAndDropTable [GOOD]
>> Cdc::RenameTable
>> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD]
>> Cdc::HugeKeyDebezium [GOOD]
>> Cdc::Drop[PqRunner]
>> TExportToS3Tests::ShouldSucceedOnConcurrentExport
>> TExportToS3Tests::UidAsIdempotencyKey
>> KikimrIcGateway::TestListPath [GOOD]
>> KikimrIcGateway::TestDropTable
>> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD]
>> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount
>> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD]
>> Cdc::ShouldBreakLocksOnConcurrentAlterTable
>> TExecutorDb::RandomCoordinatorSimulation [GOOD]
>> TExecutorDb::MultiPage
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD]
Test command err:
Trying to start YDB, gRPC: 22945, MsgBus: 24099
2025-10-24T16:24:25.665588Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824964731498339:2065];send_to=[0:7307199536658146131:7762515];
2025-10-24T16:24:25.665609Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00098e/r3tmp/tmpaPTjQC/pdisk_1.dat
2025-10-24T16:24:25.722380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:25.734372Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:25.737143Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824964731498314:2081] 1761323065665432 != 1761323065665435 TServer::EnableGrpc on GrpcPort 22945, node 1 2025-10-24T16:24:25.746635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:25.746650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:25.746652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:25.746697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24099 2025-10-24T16:24:25.776338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:25.776362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:25.777460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:25.822382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:25.825634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:25.838804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:24:25.863338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:25.906117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:25.923086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:25.964963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:26.367440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824969026467256:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.367478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.368397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824969026467265:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.368410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.675011Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:26.858377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.874101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.911029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.930801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.973658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:27.026063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:27.097093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:27.196262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:27.344473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824973321435441:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:27.344505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:27.345413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824973321435446:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:27.345420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824973321435447:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:27.345425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:27.347980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:27.376176Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824973321435450:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:27.446265Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824973321435502:3560] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TExportToS3Tests::DecimalOutOfRange [GOOD] >> TExportToS3Tests::CorruptedDecimalValue >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> KqpBatchDelete::Large_3 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken >> test.py::test[insert-select_with_sort_limit-default.txt-Results] [GOOD] >> test.py::test[insert-two_input_tables--Results] >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> Cdc::RenameTable [GOOD] >> Cdc::ResolvedTimestamps >> TExportToS3Tests::CorruptedDecimalValue [GOOD] >> KqpBatchDelete::SimplePartitions [GOOD] >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::SingleKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 4969, MsgBus: 31986 2025-10-24T16:23:41.692577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824773291448641:2272];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001485/r3tmp/tmp5ZPqc3/pdisk_1.dat 2025-10-24T16:23:41.696388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:41.776286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:41.779707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:41.779726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:41.782077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-10-24T16:23:41.803340Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4969, node 1 2025-10-24T16:23:41.816072Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:41.816085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:41.816087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:41.816121Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31986 TClient is connected to server localhost:31986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:41.965915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:41.976293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:23:41.989671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.991424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:42.022888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:42.080816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:23:42.121920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.350508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824777586417305:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.350534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.350686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824777586417314:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.350693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.466835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.487129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.523420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.539904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.552513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.568265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.588094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.605984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:42.627949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824777586418177:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.627977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824777586418182:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.627988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.628065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824777586418185:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.628074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:42.628734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemesha ... : EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:15.664441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:15.672663Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:15.692720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:15.721089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:15.775692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:15.807065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:15.995392Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824920203360904:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.995414Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.995568Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824920203360914:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:15.995577Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:16.009292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.027379Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.049265Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.081315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.158157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.184776Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.209048Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.237720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:16.280601Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7564824924498329074:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:16.280632Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:16.281561Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824924498329079:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:16.281574Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564824924498329080:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:16.281582Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:16.282640Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:16.288745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:24:16.288839Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564824924498329083:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:16.357321Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564824924498329135:3550] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:16.535952Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:16.675343Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:20.534629Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7564824920203359289:2063];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:20.534676Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:24:30.564160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:24:30.564184Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> TExportToS3Tests::UserSID >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> TExportToS3Tests::RebootDuringCompletion >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::TopicsExport ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 31145, MsgBus: 24628 2025-10-24T16:23:39.873482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824766211578065:2172];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:39.873589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014d7/r3tmp/tmpsdfiHg/pdisk_1.dat 2025-10-24T16:23:40.012327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:40.034090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:40.034242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:40.040906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:40.125837Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:40.127932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824766211577912:2081] 1761323019868386 != 1761323019868389 TServer::EnableGrpc on GrpcPort 31145, node 1 2025-10-24T16:23:40.197323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:40.254474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:40.254486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:40.254488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:40.254530Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24628 TClient is connected to server localhost:24628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:40.437989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:23:40.441026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:23:40.500750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:40.612124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:23:40.714420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:40.739257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:40.879509Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:41.053731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774801514173:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.053877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.059944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774801514183:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.059977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.067907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824774801514185:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.067937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.164369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.182771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.196926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.213093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.225712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.243170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.269129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.284888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:41.309441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824774801515050:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.309467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:41.309585Z node ... ScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 61274, node 16 2025-10-24T16:24:28.890812Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:28.890824Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:28.890826Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:28.890872Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25567 TClient is connected to server localhost:25567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:29.072762Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:29.080534Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:29.098803Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.125913Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:29.208780Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.240593Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.432208Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564824982267782301:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.432227Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.432311Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564824982267782311:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.432323Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.440546Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.475890Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.512965Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.535327Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.561834Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.579846Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.599607Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.616179Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.641494Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[16:7564824982267783171:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.641516Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.641619Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564824982267783177:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.641624Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564824982267783176:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.641628Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.642351Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:29.646311Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:24:29.646368Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7564824982267783180:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:29.711471Z node 16 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [16:7564824982267783232:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:29.747523Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestDropResourcePool >> TExportToS3Tests::TopicsExport [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter >> TExportToS3Tests::TopicsWithPermissionsExport >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestDropStreamingQuery >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> TExportToS3Tests::RebootDuringAbortion >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:25.797491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:25.797521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:25.797527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:25.797535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:25.797541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-10-24T16:24:25.797546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:25.797555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:25.797568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:25.797695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:25.797755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:25.814383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:25.814403Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:25.817706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:25.817744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:25.817780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:25.819205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:25.819389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:25.819510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:25.819595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:25.820637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:25.820682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:25.820932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:25.820944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:25.820974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:25.820983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:25.820990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:25.821017Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.822385Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:25.847523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:25.847621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.847687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:25.847695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:25.847765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:25.847781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:25.848656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:25.848700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:25.848763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.848773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:25.848779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:25.848785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:25.849182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.849193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:25.849199Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:25.849528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.849539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.849546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:25.849553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:25.850256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:25.850832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:25.850873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:25.851087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:25.851113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:25.851136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:25.851204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:25.851212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:25.851245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:25.851258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:25.851710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-10-24T16:24:25.851722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... t strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-10-24T16:24:31.371994Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-10-24T16:24:31.372204Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:31.372220Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 21474838641 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:31.372226Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:412: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-10-24T16:24:31.372239Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710759:0 128 -> 129 2025-10-24T16:24:31.372255Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-10-24T16:24:31.379229Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:31.379239Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-10-24T16:24:31.379287Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:31.379294Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-10-24T16:24:31.379743Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:31.379756Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-10-24T16:24:31.379920Z node 5 :DATASHARD_BACKUP ERROR: export_scan.cpp:208: [Export] [scanner] Error read data from table: Invalid Decimal binary representation 2025-10-24T16:24:31.382596Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-10-24T16:24:31.382614Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-10-24T16:24:31.382617Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-10-24T16:24:31.382623Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-10-24T16:24:31.382628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-10-24T16:24:31.382640Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-10-24T16:24:31.383407Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /Backup1/metadata.json HTTP/1.1 HEADERS: Host: localhost:31847 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: ACF5092E-0920-4400-9EC1-B352EB7CAABA amz-sdk-request: attempt=1 content-length: 106 content-md5: TSh230u831Vzs7S1LucNSQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json / / 106 2025-10-24T16:24:31.417118Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 21474838899 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:24:31.417141Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-10-24T16:24:31.417164Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 452 RawX2: 21474838899 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:24:31.417177Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 452 RawX2: 21474838899 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:24:31.417191Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:31.417195Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:31.417200Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-10-24T16:24:31.417206Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710759:0 129 -> 240 2025-10-24T16:24:31.417245Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:31.424466Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:31.424676Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:31.424693Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-10-24T16:24:31.424731Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:0 progress is 1/1 2025-10-24T16:24:31.424737Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-10-24T16:24:31.424744Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:0 progress is 1/1 2025-10-24T16:24:31.424748Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-10-24T16:24:31.424754Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-10-24T16:24:31.424783Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:129:2153] message: TxId: 281474976710759 2025-10-24T16:24:31.424795Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-10-24T16:24:31.424802Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:0 2025-10-24T16:24:31.424807Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710759:0 2025-10-24T16:24:31.424848Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 4] was 3 2025-10-24T16:24:31.425612Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-10-24T16:24:31.425632Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710759 2025-10-24T16:24:31.425704Z node 5 :EXPORT NOTICE: schemeshard_export__create.cpp:665: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table1' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2] SourcePathType: EPathTypeTable State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid Decimal binary representation' } 2025-10-24T16:24:31.426210Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:24:31.426223Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:480:2439] TestWaitNotification: OK eventTxId 102 >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DropColumn >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> test.py::test[action-dep_world_quote_code-default.txt-Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_expr--Results] [SKIPPED] >> test.py::test[action-eval_each_input_table-default.txt-Results] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |55.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex >> 
TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::ExportStartTime >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestAlterResourcePool >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:25.741283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:25.741317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:25.741323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:25.741331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:25.741339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:25.741344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:25.741354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:25.741370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:25.741504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:25.741601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:25.758770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:25.758800Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:25.762608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:25.762657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:25.762711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:25.765557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:25.765822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:25.765947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:25.766039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:25.767287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:25.767335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:25.767596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:25.767607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:25.767636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:25.767645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:25.767652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:25.767692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.769549Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:25.796429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:25.796530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.796598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:25.796605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:25.796668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-10-24T16:24:25.796685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:25.797455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:25.797499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:25.797562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.797572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:25.797578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:25.797583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:25.798020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.798031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:25.798037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:25.798370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.798379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:25.798386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:25.798393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:25.799074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:25.799529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:25.799567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 
1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:25.799772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:25.799796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:25.799803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:25.799893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:25.799902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:25.799932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:25.799945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:25.800680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:25.800700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
unt reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:24:32.074297Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.074307Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.074311Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710769 2025-10-24T16:24:32.074316Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 7 2025-10-24T16:24:32.074320Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-10-24T16:24:32.074329Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 0/1, is published: true 2025-10-24T16:24:32.074981Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710769, at schemeshard: 72057594046678944 2025-10-24T16:24:32.074993Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 0/1, is published: true 2025-10-24T16:24:32.074999Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710769, at schemeshard: 72057594046678944 2025-10-24T16:24:32.075051Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710769:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710769 msg type: 269090816 2025-10-24T16:24:32.075076Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710769, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710769 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710769 at step: 5000014 2025-10-24T16:24:32.075296Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000014, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.075317Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710769 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 21474838638 } } Step: 5000014 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:32.075324Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710769:0, step: 5000014, at schemeshard: 72057594046678944 2025-10-24T16:24:32.075348Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710769:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.075357Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710769:0 progress is 1/1 2025-10-24T16:24:32.075361Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-10-24T16:24:32.075367Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710769:0 progress is 1/1 2025-10-24T16:24:32.075371Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-10-24T16:24:32.075379Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:24:32.075389Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-10-24T16:24:32.075396Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710769, ready parts: 1/1, is published: false 2025-10-24T16:24:32.075403Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710769 ready parts: 1/1 2025-10-24T16:24:32.075408Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710769:0 2025-10-24T16:24:32.075412Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710769:0 2025-10-24T16:24:32.075421Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-10-24T16:24:32.075430Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710769, publications: 2, subscribers: 1 2025-10-24T16:24:32.075435Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 1], 19 2025-10-24T16:24:32.075440Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710769, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-10-24T16:24:32.075605Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.075623Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.080132Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:32.080152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:32.080193Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710769, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-10-24T16:24:32.080222Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:32.080229Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2214], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 1 2025-10-24T16:24:32.080234Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2214], at schemeshard: 72057594046678944, txId: 281474976710769, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710769 2025-10-24T16:24:32.080427Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 19 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.080445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 19 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.080451Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710769 2025-10-24T16:24:32.080457Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 19 2025-10-24T16:24:32.080464Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:24:32.080604Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.080621Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.080626Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710769 2025-10-24T16:24:32.080632Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710769, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-10-24T16:24:32.080639Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-10-24T16:24:32.080655Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 
72057594046678944, txId: 281474976710769, subscribers: 1 2025-10-24T16:24:32.080666Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:139:2161] 2025-10-24T16:24:32.081346Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.081396Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710769 2025-10-24T16:24:32.081411Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710769 2025-10-24T16:24:32.081423Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710769 2025-10-24T16:24:32.081829Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:24:32.081842Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [5:949:2871] TestWaitNotification: OK eventTxId 104 >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary >> KikimrIcGateway::TestAlterResourcePool [GOOD] >> KikimrIcGateway::TestAlterStreamingQuery >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::DropSourceTableBeforeTransferring >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> ViewerTopicDataTests::TopicDataTest >> TExportToS3Tests::TablePermissions [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:30.469099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:30.469133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:30.469138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:30.469148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:30.469155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:30.469159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:30.469169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:30.469183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:30.469310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:30.469767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:30.658317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:30.658344Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:30.716367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:30.716401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:30.716437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:30.722311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:30.722497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:30.722632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:30.722711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:30.723635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:30.723677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:30.723947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:30.723961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:30.723992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:30.724003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:30.724012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:30.724041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:30.725540Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:30.750915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:30.751048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:30.751134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:30.751145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:30.751236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:30.751254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:30.753920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:30.753980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:30.754058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:30.754071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:30.754077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:30.754084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:30.754972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:30.754983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:30.754988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:30.755485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:30.755494Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:30.755500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:30.755509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:30.757884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:30.758726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:30.758768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:30.758989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:30.759013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:30.759021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:30.759092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:30.759099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:30.759134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:30.759147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:30.760302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:30.760313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
6710758 2025-10-24T16:24:32.868101Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-10-24T16:24:32.868105Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-10-24T16:24:32.868116Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-10-24T16:24:32.868443Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868452Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-10-24T16:24:32.868456Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-10-24T16:24:32.868838Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-10-24T16:24:32.868862Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000008 2025-10-24T16:24:32.868906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.868931Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 17179871344 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868954Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000008, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868974Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868981Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:24:32.868984Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
281474976710758 ready parts: 1/1 2025-10-24T16:24:32.868988Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:24:32.868991Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:32.869000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-10-24T16:24:32.869008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-10-24T16:24:32.869014Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-10-24T16:24:32.869020Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:32.869024Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-10-24T16:24:32.869028Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710758:0 2025-10-24T16:24:32.869037Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-10-24T16:24:32.869042Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-10-24T16:24:32.869047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 17 2025-10-24T16:24:32.869050Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-10-24T16:24:32.869374Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:32.869383Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:32.869413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-10-24T16:24:32.869436Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:32.869441Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-10-24T16:24:32.869446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path 
id: 7 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-10-24T16:24:32.869592Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 17 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.869604Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 17 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.869609Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:24:32.869614Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 17 2025-10-24T16:24:32.869619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2025-10-24T16:24:32.869730Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.869740Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.869744Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:24:32.869748Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-10-24T16:24:32.869753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-10-24T16:24:32.869764Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-10-24T16:24:32.869769Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:131:2155] 2025-10-24T16:24:32.869789Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:24:32.869793Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-10-24T16:24:32.869803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-10-24T16:24:32.870423Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.870630Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:32.870649Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-10-24T16:24:32.870659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710758 2025-10-24T16:24:32.870742Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:24:32.871034Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:24:32.871044Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [4:1285:3092] TestWaitNotification: OK eventTxId 106 |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd >> TExportToS3Tests::TopicExport >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> TExportToS3Tests::ExportStartTime [GOOD] |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |55.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 
GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 
RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 
RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
step 3, product {1 parts epoch 2} done 00001.038 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00001.038 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 1}} 00001.038 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00001.038 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00001.038 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{5 on 2, Compact{1.2.7, eph 2}} 00001.038 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 5 00001.038 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00001.038 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00001.257 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00001.258 II| OPS_COMPACT: Compact{1.2.7, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00001.454 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00001.454 II| OPS_COMPACT: Compact{1.2.6, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00001.455 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 7, product {1 parts epoch 3} done 00002.027 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 3 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 6, product {1 parts epoch 0} done 00002.027 II| TABLET_EXECUTOR: Leader{1:2:11} starting compaction 00002.027 II| TABLET_EXECUTOR: Leader{1:2:12} starting Scan{7 on 2, Compact{1.2.11, eph 3}} 00002.027 II| TABLET_EXECUTOR: Leader{1:2:12} started compaction 7 00002.027 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00002.031 II| TABLET_EXECUTOR: Leader{1:2:12} starting compaction 00002.031 II| TABLET_EXECUTOR: Leader{1:2:13} starting Scan{9 on 2, Compact{1.2.12, eph 2}} 00002.031 II| TABLET_EXECUTOR: Leader{1:2:13} started compaction 9 00002.031 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} begin on TSubset{head 0, 0m 1p 0c} 00002.031 II| TABLET_EXECUTOR: Leader{1:2:13} starting compaction 00002.031 II| TABLET_EXECUTOR: Leader{1:2:14} starting Scan{11 on 2, Compact{1.2.13, eph 1}} 00002.031 II| TABLET_EXECUTOR: Leader{1:2:14} started compaction 11 00002.031 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00002.031 II| OPS_COMPACT: Compact{1.2.11, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00002.031 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00002.126 II| TABLET_EXECUTOR: Leader{1:2:14} Compact 7 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 11, product {1 parts epoch 4} done 00002.126 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00002.126 II| OPS_COMPACT: 
Compact{1.2.13, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00002.127 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00002.127 II| OPS_COMPACT: Compact{1.2.12, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00002.139 II| TABLET_EXECUTOR: Leader{1:2:15} Compact 11 on TGenCompactionParams{2: gen 2 epoch 0, 1 parts} step 13, product {1 parts epoch 0} done 00002.139 II| TABLET_EXECUTOR: Leader{1:2:16} Compact 9 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 12, product {1 parts epoch 0} done 00002.139 II| TABLET_EXECUTOR: Leader{1:2:17} starting compaction 00002.139 II| TABLET_EXECUTOR: Leader{1:2:18} starting Scan{13 on 2, Compact{1.2.17, eph 3}} 00002.139 II| TABLET_EXECUTOR: Leader{1:2:18} started compaction 13 00002.139 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} begin on TSubset{head 0, 0m 1p 0c} 00002.140 II| TABLET_EXECUTOR: Leader{1:2:18} starting compaction 00002.140 II| TABLET_EXECUTOR: Leader{1:2:19} starting Scan{15 on 2, Compact{1.2.18, eph 2}} 00002.140 II| TABLET_EXECUTOR: Leader{1:2:19} started compaction 15 00002.140 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00002.161 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00002.161 II| OPS_COMPACT: Compact{1.2.17, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00002.174 II| TABLET_EXECUTOR: Leader{1:2:19} Compact 13 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 17, product {1 parts epoch 0} done 00002.192 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=5}, trace 0 of 0 ~3p 00002.192 II| OPS_COMPACT: Compact{1.2.18, eph 2} end=Done, 6 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00002.192 II| TABLET_EXECUTOR: Leader{1:2:20} Compact 15 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 18, product {1 parts epoch 0} done 00002.193 II| TABLET_EXECUTOR: Leader{1:2:21} starting compaction 00002.193 II| TABLET_EXECUTOR: Leader{1:2:22} starting Scan{17 on 2, Compact{1.2.21, eph 3}} 00002.193 II| TABLET_EXECUTOR: Leader{1:2:22} started compaction 17 00002.193 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} begin on TSubset{head 0, 0m 2p 0c} 00002.219 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=4}, trace 0 of 0 ~3p 00002.219 II| OPS_COMPACT: Compact{1.2.21, eph 3} end=Done, 6 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00002.219 II| TABLET_EXECUTOR: Leader{1:2:22} Compact 17 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 21, product {1 parts epoch 0} done 00002.231 II| 
TABLET_EXECUTOR: Leader{1:2:23} suiciding, Waste{2:0, 20001011b +(44, 90121851b), 22 trc, -90121851b acc} 00002.232 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00002.232 NN| TABLET_SAUSAGECACHE: Poison cache serviced 26 reqs hit {7 20000231b} miss {19 100000536b} 00002.232 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.232 II| FAKE_ENV: DS.0 gone, left {3604b, 22}, put {3624b, 23} 00002.232 II| FAKE_ENV: DS.1 gone, left {122950b, 32}, put {122950b, 32} 00002.235 II| FAKE_ENV: DS.2 gone, left {110001012b, 29}, put {110001012b, 29} 00002.253 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.253 II| FAKE_ENV: All BS storage groups are stopped 00002.253 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.253 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 82}, stopped 00000.000 II| FAKE_ENV: Born at 2025-10-24T16:24:32.652601Z 00000.002 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.002 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.002 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.002 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.033 II| TABLET_EXECUTOR: Leader{1:2:3} starting compaction 00000.033 II| TABLET_EXECUTOR: Leader{1:2:4} starting Scan{1 on 2, Compact{1.2.3, eph 1}} 00000.033 II| TABLET_EXECUTOR: Leader{1:2:4} started compaction 1 00000.033 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.038 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.038 II| OPS_COMPACT: Compact{1.2.3, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (39360 0 0)b }, ecr=0.004 00000.038 II| TABLET_EXECUTOR: Leader{1:2:4} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 3, product {1 parts epoch 2} done 00000.054 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.054 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 2}} 00000.054 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.054 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} begin on TSubset{head 3, 1m 1p 0c} 00000.090 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} end=Done, 2r seen, TFwd{fetch=38.3KiB,saved=38.3KiB,usage=38.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.090 II| OPS_COMPACT: Compact{1.2.6, eph 2} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.090 II| TABLET_EXECUTOR: Leader{1:2:7} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 6, product {1 parts epoch 3} done 00000.117 II| TABLET_EXECUTOR: Leader{1:2:9} starting compaction 00000.117 II| TABLET_EXECUTOR: Leader{1:2:10} starting Scan{5 on 2, Compact{1.2.9, eph 3}} 00000.117 II| TABLET_EXECUTOR: Leader{1:2:10} started compaction 5 00000.117 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} begin on TSubset{head 4, 1m 1p 0c} 00000.179 
II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} end=Done, 2r seen, TFwd{fetch=76.8KiB,saved=76.8KiB,usage=76.8KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.180 II| OPS_COMPACT: Compact{1.2.9, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.251 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 9, product {1 parts epoch 4} done 00000.252 II| TABLET_EXECUTOR: Leader{1:2:11} suiciding, Waste{2:0, 79404b +(7, 236400b), 10 trc, -236400b acc} 00000.252 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.252 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {4 117917b} miss {0 0b} 00000.252 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.252 II| FAKE_ENV: DS.0 gone, left {1032b, 10}, put {1052b, 11} 00000.252 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.252 II| FAKE_ENV: DS.1 gone, left {316163b, 14}, put {316163b, 14} 00000.252 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.252 II| FAKE_ENV: All BS storage groups are stopped 00000.252 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.252 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 40}, stopped >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> KikimrIcGateway::TestDropStreamingQuery [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> TExportToS3Tests::TopicExport [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::TopicWithPermissionsExport >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> TExportToS3Tests::CheckItemProgress >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::ExportTableWithUniqueIndex >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 |55.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |55.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[schema-yamred_dsv_select_from_dict--Results] |55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 20847, MsgBus: 15302 2025-10-24T16:24:25.921619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824964761102064:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:25.921662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000962/r3tmp/tmpWrhc6K/pdisk_1.dat 2025-10-24T16:24:25.983726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20847, node 1 2025-10-24T16:24:26.012631Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:26.013003Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824964761101808:2081] 1761323065915794 != 1761323065915797 2025-10-24T16:24:26.026782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.027949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.028748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:26.052398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.052413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.052416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.052457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15302 TClient is connected to server localhost:15302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:24:26.216565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:26.220792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:24:26.267560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:24:26.280540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-10-24T16:24:26.924619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:28.444428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824977646004434:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.444486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.454257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:28.456106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824977646004487:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.456134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.459485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824977646004499:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.459506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.568162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:28.620239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:28.653678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:28.698390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824977646004748:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.698414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.698551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824977646004753:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.698557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824977646004754:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.698561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:28.699384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:28.712645Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824977646004757:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-10-24T16:24:28.785534Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824977646004808:2574] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 12773, MsgBus: 31888 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000962/r3tmp/tmpGs1vwJ/pdisk_1.dat 2025-10-24T16:24:29.435425Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:29.435482Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:29.436701Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:29.436722Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:29.437543Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:29.440232Z nod ... s permissions } 2025-10-24T16:24:31.637577Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.638498Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:31.644383Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:24:31.644505Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564824988380950361:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:31.732867Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564824988380950413:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:31.857029Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:32.068494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-10-24T16:24:32.070042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-10-24T16:24:32.074648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-10-24T16:24:32.087267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 23184, MsgBus: 27682 2025-10-24T16:24:32.600417Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7564824991971471036:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:32.600575Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:32.605055Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000962/r3tmp/tmpEuhYuC/pdisk_1.dat 2025-10-24T16:24:32.621926Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23184, node 4 2025-10-24T16:24:32.631706Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:32.631719Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:32.631721Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:32.631771Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27682 TClient is connected to server localhost:27682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:32.691572Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:32.694187Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:32.704772Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:32.704809Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:32.705740Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:32.820107Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:33.601219Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:33.604623Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:33.622968Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824996266439053:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:33.622986Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824996266439061:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:33.622992Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:33.623120Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564824996266439068:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:33.623135Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:33.623659Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:33.634353Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564824996266439067:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:24:33.686943Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564824996266439109:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:33.761540Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:33.827867Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:33.932451Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> CompressExecutor::TestExecutorMemUsage [GOOD] >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AllTypesColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2963:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2963:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2963:2117] 2025-10-24T16:23:39.203696Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:39.205211Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:39.205326Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:39.205682Z 
node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:39.205997Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:39.206071Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.206079Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.206144Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:39.207310Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:39.207349Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:39.207379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:39.207398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.207410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.207421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2025-10-24T16:23:39.217989Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:39.218046Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.242051Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.242121Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.242140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.242156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.242187Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.242198Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.242204Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.242217Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.256911Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.256971Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.267377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.267459Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:39.267745Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:39.267754Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:39.267804Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:39.267830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:39.271002Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-10-24T16:23:39.271223Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-10-24T16:23:39.271233Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-10-24T16:23:39.271239Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-10-24T16:23:39.271243Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new 
pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-10-24T16:23:39.271248Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-10-24T16:23:39.271253Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-10-24T16:23:39.271259Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-10-24T16:23:39.271264Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-10-24T16:23:39.271269Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-10-24T16:23:39.271273Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-10-24T16:23:39.271278Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-10-24T16:23:39.271282Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-10-24T16:23:39.271287Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-10-24T16:23:39.271292Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-10-24T16:23:39.271296Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-10-24T16:23:39.271301Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-10-24T16:23:39.271306Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-10-24T16:23:39.271311Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-10-24T16:23:39.271315Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-10-24T16:23:39.271320Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-10-24T16:23:39.271325Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-10-24T16:23:39.271329Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-10-24T16:23:39.271334Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-10-24T16:23:39.271339Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-10-24T16:23:39.271344Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-10-24T16:23:39.271349Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-10-24T16:23:39.271354Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-10-24T16:23:39.271358Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-10-24T16:23:39.271362Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-10-24T16:23:39.271366Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-10-24T16:23:39.271371Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-10-24T16:23:39.271375Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-10-24T16:23:39.271380Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Cr ... 4T16:24:24.989805Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 203:1001 Path# /dev/disk2 2025-10-24T16:24:24.989810Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 203:1002 Path# /dev/disk3 2025-10-24T16:24:24.989815Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-10-24T16:24:24.989821Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-10-24T16:24:24.989826Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-10-24T16:24:24.989831Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-10-24T16:24:24.989837Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-10-24T16:24:24.989843Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-10-24T16:24:24.989848Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-10-24T16:24:24.989853Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-10-24T16:24:24.989859Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-10-24T16:24:24.989864Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-10-24T16:24:24.989883Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-10-24T16:24:24.989889Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-10-24T16:24:24.989894Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-10-24T16:24:24.989899Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-10-24T16:24:24.989904Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-10-24T16:24:24.989909Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-10-24T16:24:24.989914Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-10-24T16:24:24.989919Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
209:1002 Path# /dev/disk3 2025-10-24T16:24:24.989924Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-10-24T16:24:24.989929Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-10-24T16:24:24.989935Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-10-24T16:24:25.092664Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: 
"::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-10-24T16:24:25.094627Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-10-24T16:24:25.094650Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-10-24T16:24:25.094657Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-10-24T16:24:25.094662Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-10-24T16:24:25.094668Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-10-24T16:24:25.094674Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-10-24T16:24:25.094679Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-10-24T16:24:25.094685Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-10-24T16:24:25.094691Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-10-24T16:24:25.094696Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-10-24T16:24:25.094702Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-10-24T16:24:25.094708Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-10-24T16:24:25.094716Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-10-24T16:24:25.094722Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-10-24T16:24:25.094727Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-10-24T16:24:25.094732Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-10-24T16:24:25.094738Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create 
new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-10-24T16:24:25.094743Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-10-24T16:24:25.094748Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-10-24T16:24:25.094753Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-10-24T16:24:25.094759Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2025-10-24T16:24:25.094764Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-10-24T16:24:25.094769Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-10-24T16:24:25.094775Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-10-24T16:24:25.094780Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-10-24T16:24:25.094786Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-10-24T16:24:25.094791Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-10-24T16:24:25.094808Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-10-24T16:24:25.094838Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-10-24T16:24:25.094844Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> test.py::test[optimizers-drop_unessential_map-default.txt-Results] [GOOD] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[optimizers-nonselected_direct_row--Results] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpScanArrowFormat::AggregateCountStar >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 
2025-10-24T16:24:32.766198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:32.766231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:32.766237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:32.766246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:32.766254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:32.766259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:32.766269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:32.766287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:32.766429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:32.766502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:32.796268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:32.796296Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:32.804337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:32.804441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:32.804482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:32.852039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:32.852221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:32.852365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.852492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:32.868173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2025-10-24T16:24:32.868571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:32.868610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:32.868620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:32.868626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:32.868646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.873827Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:24:32.896964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:32.897082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.897164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:32.897174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:32.897250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:32.897267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:32.898222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.898276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:32.898347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:24:32.898357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:32.898364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:32.898370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:32.898746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.898757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:32.898763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:32.899069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.899078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.899085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:32.899093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:32.899819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:32.900253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:32.900300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:32.900563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.900589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:32.900598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:32.900681Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:32.900688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:32.900727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:32.900741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:32.901119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:32.901128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 474976710758 2025-10-24T16:24:35.928067Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:24:35.928074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:24:35.928087Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-10-24T16:24:35.928959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-10-24T16:24:35.928972Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-10-24T16:24:35.928978Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-10-24T16:24:35.929113Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-10-24T16:24:35.929144Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-10-24T16:24:35.929198Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000004 2025-10-24T16:24:35.929371Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:35.929394Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 
281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 17179871344 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:35.929402Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000004, at schemeshard: 72057594046678944 2025-10-24T16:24:35.929427Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:24:35.929439Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:24:35.929444Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:35.929450Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:24:35.929453Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:35.929463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:24:35.929472Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:24:35.929478Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-10-24T16:24:35.929488Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:35.929493Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-10-24T16:24:35.929498Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710758:0 2025-10-24T16:24:35.929507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:24:35.929514Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-10-24T16:24:35.929520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-10-24T16:24:35.929524Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-10-24T16:24:35.929661Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.936215Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:35.936236Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:35.936291Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:24:35.936322Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:35.936327Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-10-24T16:24:35.936331Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:212:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-10-24T16:24:35.936522Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.936533Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.936538Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:24:35.936543Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-10-24T16:24:35.936550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:24:35.936700Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.936710Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.936714Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:24:35.936717Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:24:35.936721Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-10-24T16:24:35.936731Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-10-24T16:24:35.936736Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:131:2155] 2025-10-24T16:24:35.936756Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:24:35.936761Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-10-24T16:24:35.936772Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:24:35.937306Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.937418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:35.937509Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-10-24T16:24:35.937520Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710758 2025-10-24T16:24:35.937706Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:24:35.937865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:24:35.937874Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:520:2467] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] Test command err: Trying to start YDB, gRPC: 24083, MsgBus: 28338 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000999/r3tmp/tmp1n3N3Q/pdisk_1.dat 2025-10-24T16:24:25.583443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:25.583473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:25.584622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:25.593310Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:25.594152Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824965875741038:2081] 1761323065539819 != 1761323065539822 
TServer::EnableGrpc on GrpcPort 24083, node 1 2025-10-24T16:24:25.602031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:25.606730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:25.606740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:25.606742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:25.606784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28338 TClient is connected to server localhost:28338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:25.672878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:25.676139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:25.681701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:25.714826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:25.742886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:25.757835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:25.978265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824965875742690:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:25.978295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:25.978422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824965875742700:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:25.978428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.100869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.114732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.130298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.143475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.157555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.178348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.193959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.231477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:26.397337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824970170710859:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.397354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.397798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824970170710864:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.397806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824970170710865:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.397863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:26.401828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:26.432521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281 ... ype: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:34.905310Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:34.916376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:34.932894Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:35.011022Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:35.084844Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:35.105102Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:35.531603Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825005318630416:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.531661Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.540013Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825005318630434:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.540170Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.544779Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.566984Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.580705Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.605357Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.633370Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.644703Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:35.658771Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.684974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.714502Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.774850Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825005318631295:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.774882Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.775043Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825005318631300:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.775053Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825005318631301:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.775059Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.776272Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:35.784924Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825005318631304:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:35.875601Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825005318631356:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:36.183261Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-10-24T16:24:36.185503Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-10-24T16:24:36.205039Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:24:36.207786Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> Cdc::DropIndex [GOOD] >> Cdc::InitialScan >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TExportToS3Tests::CheckItemProgress [GOOD] >> TGroupMapperTest::NonUniformCluster [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 26619, MsgBus: 19652 2025-10-24T16:24:29.584303Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824981537209424:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:29.584352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000929/r3tmp/tmpaYeeni/pdisk_1.dat 2025-10-24T16:24:30.108558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:30.125532Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:30.125555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:30.132243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26619, node 1 2025-10-24T16:24:30.276249Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:30.305789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:30.305888Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824981537209320:2081] 1761323069554451 != 1761323069554454 2025-10-24T16:24:30.376966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:30.376982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:30.376985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:30.377042Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:30.631964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19652 TClient is connected to server localhost:19652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:30.961841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:31.001354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:24:31.005702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) Trying to start YDB, gRPC: 25805, MsgBus: 30932 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000929/r3tmp/tmptbnoUK/pdisk_1.dat 2025-10-24T16:24:31.865040Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:31.871922Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:31.875092Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:31.875114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:31.876087Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:31.878696Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25805, node 2 2025-10-24T16:24:31.887298Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:31.887312Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:31.887314Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:31.887356Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30932 TClient is connected to server localhost:30932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:31.952110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:31.970994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:24:32.137895Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Trying to start YDB, gRPC: 31816, MsgBus: 20598 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000929/r3tmp/tmpxoZypu/pdisk_1.dat 2025-10-24T16:24:32.471281Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:32.471380Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:32.471419Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:32.471514Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:32.475545Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:7564824995326271558:2081] 1761323072448866 != 1761323072448869 2025-10-24T16:24:32.476519Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:32.476927Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31816, node 3 2025-10-24T16:24:32.488590Z node 3 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:32.488602Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:32.488605Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:32.488651Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20598 TClient is connected to server localhost:20598 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:24:32.542935Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:32.548118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:32.560233Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 20627, MsgBus: 17499 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000929/r3tmp/tmppCOw5h/pdisk_1.dat 2025-10-24T16:24:33.010137Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7564824999798036627:2095];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:33.012292Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:33.043941Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:33.044660Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:33.046265Z 
node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:7564824999798036547:2081] 1761323073008452 != 1761323073008455 TServer::EnableGrpc on GrpcPort 20627, node 4 2025-10-24T16:24:33.060062Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:33.060077Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:33.060079Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:33.060138Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17499 TClient is connected to server localhost:17499 2025-10-24T16:24:33.120083Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:33.120118Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:33.121813Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:33.125657Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:33.295047Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:34.013915Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:34.035986Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:34.134259Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825004093004627:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:34.134296Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:34.134453Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825004093004639:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:34.134461Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825004093004640:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:34.134475Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:34.135462Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:34.138723Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-10-24T16:24:34.138935Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825004093004644:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:24:34.217774Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825004093004684:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:35.497188Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:35.654352Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.838532Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) >> TExportToS3Tests::CompletedExportEndTime >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 18823, MsgBus: 29673 2025-10-24T16:24:28.737191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824977037439834:2235];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:28.737251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:28.760554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00092f/r3tmp/tmpGv3sRR/pdisk_1.dat 2025-10-24T16:24:29.214339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:29.214366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:29.228340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:29.293279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:29.298813Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824977037439610:2081] 1761323068705180 != 1761323068705183 2025-10-24T16:24:29.304406Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18823, node 1 2025-10-24T16:24:29.483513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:29.488163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:29.488173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:29.488175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:29.488214Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:29.723964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29673 TClient is connected to server localhost:29673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:30.058502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:30.062810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:30.079129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:24:30.095476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) Trying to start YDB, gRPC: 20322, MsgBus: 21489 2025-10-24T16:24:31.589764Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824990029985313:2252];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:31.589812Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00092f/r3tmp/tmprZwhLf/pdisk_1.dat 2025-10-24T16:24:31.632014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:31.640416Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:31.640448Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:31.641357Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:31.645012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:31.645070Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564824990029985090:2081] 1761323071587477 != 1761323071587480 TServer::EnableGrpc on GrpcPort 20322, node 2 2025-10-24T16:24:31.670528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:31.670540Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:31.670543Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:31.670590Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21489 TClient is connected to server localhost:21489 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:24:31.722940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:31.733260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:24:31.741329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133 2025-10-24T16:24:31.747914Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824990029985772:2339] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133" severity: 1 } 2025-10-24T16:24:31.747989Z node 2 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976715660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:133 Trying to start YDB, gRPC: 28476, MsgBus: 25064 2025-10-24T16:24:32.555006Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7564824995539962630:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:32.555168Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initia ... 7968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65217, node 4 2025-10-24T16:24:33.350146Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:33.350175Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:33.350177Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:33.350228Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16126 TClient is connected to server localhost:16126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:24:33.402017Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:33.403560Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:33.411350Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:33.416243Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) Trying to start YDB, gRPC: 64656, MsgBus: 25853 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00092f/r3tmp/tmpAQ1K1u/pdisk_1.dat 2025-10-24T16:24:33.870248Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:33.875959Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:33.876645Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:33.876676Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:33.877611Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64656, node 5 2025-10-24T16:24:33.886544Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:33.886559Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:33.886563Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:33.886608Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:33.888124Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:33.891415Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:7564825000239629563:2081] 1761323073856035 != 1761323073856038 TClient is connected to server localhost:25853 TClient is connected to server localhost:25853 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:34.038881Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:34.051942Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:34.052169Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:34.860119Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:34.892782Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:35.079620Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825008829564944:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.079640Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.082392Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825008829564955:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.082410Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825008829564957:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.082440Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:35.084414Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:35.099607Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825008829564961:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:24:35.164825Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825008829565001:2400] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:35.984046Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:36.106270Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:36.257636Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-10-24T16:23:07.131900Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1761322987131893 2025-10-24T16:23:07.425289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824627981127633:2264];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:07.425345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:07.462401Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:07.472877Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824629072590452:2146];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:07.472999Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:07.485395Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:07.485687Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001367/r3tmp/tmpDFMAlT/pdisk_1.dat 
2025-10-24T16:23:07.627381Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:07.626810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:07.755512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:07.755534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:07.765841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:07.765862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:07.767766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:07.772292Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:07.772705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:07.790723Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11695, node 1 2025-10-24T16:23:07.858292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:07.861938Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:07.936053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/001367/r3tmp/yandexIrqSch.tmp 2025-10-24T16:23:07.936062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/001367/r3tmp/yandexIrqSch.tmp 2025-10-24T16:23:07.936481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/001367/r3tmp/yandexIrqSch.tmp 2025-10-24T16:23:07.936528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:08.037293Z INFO: TTestServer started on Port 23089 GrpcPort 11695 TClient is connected to server localhost:23089 PQClient connected to localhost:11695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:08.173352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:23:08.424046Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:08.472084Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:10.060674Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824641957492611:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.060715Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824641957492591:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.060726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.061311Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824641957492622:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.061322Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.067577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:10.068457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824640866030313:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.068472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.068589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824640866030325:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.068597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824640866030326:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.068602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:10.071446Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824640866030330:2622] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:10.081226Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824641957492621:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:23:10.081147Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824640866030329:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:23:10.148404Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824640866030411:2673] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:10.176350Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824641957492651:2140] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], ... o resolve tablet: 72075186224037888 after several retries. 2025-10-24T16:24:33.789929Z node 15 :KQP_EXECUTER WARN: kqp_executer_impl.h:245: ActorId: [15:7564825000481419992:2514] TxId: 281474976710682. Ctx: { TraceId: 01k8bgjfej5jp1s7bkw9wbj7r8, Database: /Root, SessionId: ydb://session/3?node_id=15&id=YjlhMjI2MjQtM2I3ODhiOWYtOTFhYjlmNGQtNWY1YmNjNjc=, PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-10-24T16:24:33.790038Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=15&id=YjlhMjI2MjQtM2I3ODhiOWYtOTFhYjlmNGQtNWY1YmNjNjc=, ActorId: [15:7564824996186452683:2514], ActorState: ExecuteState, TraceId: 01k8bgjfej5jp1s7bkw9wbj7r8, Create QueryResponse for error on request, msg: 2025-10-24T16:24:33.790476Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01k8bgjfg67gp81nn9cqfnc04p" } } YdbStatus: UNAVAILABLE ConsumedRu: 32 } 2025-10-24T16:24:33.839905Z node 16 :KQP_EXECUTER WARN: kqp_shards_resolver.cpp:86: [ShardsResolver] TxId: 281474976720668. Failed to resolve tablet: 72075186224037888 after several retries. 2025-10-24T16:24:33.839970Z node 16 :KQP_EXECUTER WARN: kqp_executer_impl.h:245: ActorId: [16:7564824997663481241:2450] TxId: 281474976720668. Ctx: { TraceId: 01k8bgjfg83b2tdgvdrnatrrg4, Database: /Root, SessionId: ydb://session/3?node_id=16&id=YjY0YTgzYjMtZTc3NzE1Y2EtNDBjZWVhNzYtNjgxNmM3Yjc=, PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-10-24T16:24:33.840088Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=16&id=YjY0YTgzYjMtZTc3NzE1Y2EtNDBjZWVhNzYtNjgxNmM3Yjc=, ActorId: [16:7564824997663481228:2450], ActorState: ExecuteState, TraceId: 01k8bgjfg83b2tdgvdrnatrrg4, Create QueryResponse for error on request, msg: 2025-10-24T16:24:33.840532Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01k8bgjfhv3rqfpzqmwnfd7732" } } YdbStatus: UNAVAILABLE ConsumedRu: 32 } 2025-10-24T16:24:34.083975Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2025-10-24T16:24:34.085785Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:2753" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2025-10-24T16:24:34.085799Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Start write session. Will connect to endpoint: localhost:2753 2025-10-24T16:24:34.086183Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-10-24T16:24:34.086413Z node 15 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-10-24T16:24:34.086427Z node 15 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 3 2025-10-24T16:24:34.086580Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-10-24T16:24:34.086615Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:51058 2025-10-24T16:24:34.086620Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51058 proto=v1 topic=test-topic durationSec=0 2025-10-24T16:24:34.086623Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-10-24T16:24:34.087326Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 3 sessionId: describe result for acl check 2025-10-24T16:24:34.087359Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-10-24T16:24:34.087361Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO 
`/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-10-24T16:24:34.087362Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-10-24T16:24:34.087368Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [15:7564825004776387368:2524] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-10-24T16:24:34.087933Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [15:7564825004776387368:2524] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-10-24T16:24:34.892300Z node 15 :KQP_EXECUTER WARN: kqp_shards_resolver.cpp:86: [ShardsResolver] TxId: 281474976710684. Failed to resolve tablet: 72075186224037891 after several retries. 2025-10-24T16:24:34.892339Z node 15 :KQP_EXECUTER WARN: kqp_executer_impl.h:245: ActorId: [15:7564825004776387378:2526] TxId: 281474976710684. Ctx: { TraceId: 01k8bgjgh84x5zesr1yw5zqjpf, Database: /Root, SessionId: ydb://session/3?node_id=15&id=NDFlZjVhOTEtNDE0OWY4ZGEtODY5N2EyMmEtNjNmNDI4Zjc=, PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-10-24T16:24:34.892415Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=15&id=NDFlZjVhOTEtNDE0OWY4ZGEtODY5N2EyMmEtNjNmNDI4Zjc=, ActorId: [15:7564825004776387369:2526], ActorState: ExecuteState, TraceId: 01k8bgjgh84x5zesr1yw5zqjpf, Create QueryResponse for error on request, msg: 2025-10-24T16:24:34.893001Z node 15 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [15:7564825004776387368:2524] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NDFlZjVhOTEtNDE0OWY4ZGEtODY5N2EyMmEtNjNmNDI4Zjc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01k8bgjgh9670ardpst8vzvwf5" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-10-24T16:24:34.893027Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NDFlZjVhOTEtNDE0OWY4ZGEtODY5N2EyMmEtNjNmNDI4Zjc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01k8bgjgh9670ardpst8vzvwf5" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-10-24T16:24:34.893211Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-10-24T16:24:34.896354Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NDFlZjVhOTEtNDE0OWY4ZGEtODY5N2EyMmEtNjNmNDI4Zjc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01k8bgjgh9670ardpst8vzvwf5" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-10-24T16:24:34.896362Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session will restart in 2.000000s 2025-10-24T16:24:34.896386Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session: Do CDS request 2025-10-24T16:24:34.896391Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Do schedule cds request after 2000 ms 2025-10-24T16:24:35.089359Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session: close. Timeout = 0 ms 2025-10-24T16:24:35.089375Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session will now close 2025-10-24T16:24:35.089388Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session: aborting 2025-10-24T16:24:35.089596Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-10-24T16:24:35.089603Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3435f1db-27d1540c-940b076c-e56fd90_0] Write session: destroy 2025-10-24T16:24:35.876248Z node 15 :KQP_EXECUTER WARN: kqp_shards_resolver.cpp:86: [ShardsResolver] TxId: 281474976710685. Failed to resolve tablet: 72075186224037888 after several retries. 2025-10-24T16:24:35.876298Z node 15 :KQP_EXECUTER WARN: kqp_executer_impl.h:245: ActorId: [15:7564825004776387431:2530] TxId: 281474976710685. Ctx: { TraceId: 01k8bgjh7c1tdm9e0fxrcxr0eb, Database: /Root, SessionId: ydb://session/3?node_id=15&id=NTViYmJkNC1jN2Q1OGQ0NS0xMWE3OWU1NS1hODQ4OTlmOA==, PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-10-24T16:24:35.876393Z node 15 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=15&id=NTViYmJkNC1jN2Q1OGQ0NS0xMWE3OWU1NS1hODQ4OTlmOA==, ActorId: [15:7564825004776387417:2530], ActorState: ExecuteState, TraceId: 01k8bgjh7c1tdm9e0fxrcxr0eb, Create QueryResponse for error on request, msg: 2025-10-24T16:24:35.876903Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01k8bgjh9n8jjdvzjzpb12xv7w" } } YdbStatus: UNAVAILABLE ConsumedRu: 46 } >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress >> TExportToS3Tests::SchemaMapping |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:32.057316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:32.057341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:32.057347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:32.057356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:32.057363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:32.057368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:32.057377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:32.057391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:32.057510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-10-24T16:24:32.057570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:32.073308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:32.073325Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:32.076522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:32.076550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:32.076584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:32.077744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:32.077903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:32.078008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.078070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:32.078890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:32.078926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:32.079134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:32.079144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:32.079167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:32.079173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:32.079191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:32.079213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.080517Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:32.102443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:32.102531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.102590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:32.102598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:32.102661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:32.102675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:32.103457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.103497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:32.103556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.103567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:32.103572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:32.103579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:32.104002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.104014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:32.104019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:32.104348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.104359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:32.104364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:32.104374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:32.105016Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:32.105399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:32.105450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:32.105646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:32.105668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:32.105675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:32.105737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:32.105744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:32.105771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:32.105783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:32.106192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:32.106201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:24:36.825792Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710761:0 2025-10-24T16:24:36.825796Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710761:0 2025-10-24T16:24:36.825806Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:24:36.825812Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-10-24T16:24:36.825816Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-10-24T16:24:36.825819Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-10-24T16:24:36.825927Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826190Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:36.826198Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:36.826223Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:24:36.826249Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:36.826253Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-10-24T16:24:36.826257Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 5 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-10-24T16:24:36.826370Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826379Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826383Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
281474976710761 2025-10-24T16:24:36.826387Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-10-24T16:24:36.826391Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:24:36.826445Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826452Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826455Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-10-24T16:24:36.826459Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-10-24T16:24:36.826462Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:24:36.826470Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-10-24T16:24:36.826476Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:129:2153] 2025-10-24T16:24:36.826938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826980Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:24:36.826993Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-10-24T16:24:36.827003Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:24:36.827310Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:24:36.827320Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:547:2495] TestWaitNotification: OK eventTxId 102 2025-10-24T16:24:36.827424Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2025-10-24T16:24:36.827468Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue" took 52us result status StatusSuccess 2025-10-24T16:24:36.827673Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue" PathDescription { Self { Name: "ByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ByValue" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> KqpScanArrowInChanels::SingleKey >> TExportToS3Tests::EnableChecksumsPersistance >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SimpleFeatureFlags >> TExportToS3Tests::AuditCompletedExport >> TGroupMapperTest::CheckNotToBreakFailModel |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_5 >> TExportToS3Tests::SchemaMapping [GOOD] >> ViewerTopicDataTests::TopicDataTest [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::SchemaMappingEncryption ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 8858, MsgBus: 18910 2025-10-24T16:24:26.543159Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824966914276530:2259];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:26.543179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000940/r3tmp/tmpxXFgzr/pdisk_1.dat 2025-10-24T16:24:27.489597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:27.489619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:27.492640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:27.589110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:27.589435Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 8858, node 1 2025-10-24T16:24:27.621060Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:27.644407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:27.644420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:27.644422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:27.644459Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:27.785906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18910 TClient is connected to server localhost:18910 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:28.653941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:28.667166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:28.807199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:24:28.824159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-10-24T16:24:29.532383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824979799178894:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.532415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.532596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824979799178904:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.532603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.011149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.160116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.191346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.220935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.252390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824984094146505:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.252428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.252507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824984094146510:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.252698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824984094146511:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.252715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.255820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:30.268040Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824984094146514:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-10-24T16:24:30.345527Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824984094146565:2575] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 18515, MsgBus: 22248 2025-10-24T16:24:31.273961Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824989989347348:2218];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:31.274070Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000940/r3tmp/tmpSci8Fb/pdisk_1.dat 2025-10-24T16:24:31.320950Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:31.320978Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:31.324378Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:31.327996Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:31.332258Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564824989989347159:2081] 1761323071263450 != 1761323071263453 2025-10-24T16:24:31.337411Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18515, node 2 2025-10-24T16:24:31.370865Z node 2 :NET_CLASSIFIER WA ... IER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:37.521082Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30485 TClient is connected to server localhost:30485 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:37.736517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:37.740430Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:37.749594Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:37.792976Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:37.952893Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:38.025414Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:38.200932Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:38.530546Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825021782848809:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.530585Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.537457Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825021782848891:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.537499Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.537667Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825021782848893:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.537676Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.540460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.567661Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.583748Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.616497Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.639374Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.672466Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.719904Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.770550Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.820870Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825021782849678:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.820941Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.821142Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825021782849689:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.821151Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825021782849688:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.821156Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.821921Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:38.835018Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825021782849692:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:38.928593Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825021782849744:3558] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |55.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |55.5%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> KqpOlapJson::EmptyVariants[2,true,1,10,1000000,0] >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] >> TExportToS3Tests::EncryptedExport |55.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] |55.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[insert-udf_empty--Results] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> KqpResultSetFormats::ArrowFormat_Stress [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_1 >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> Viewer::SimpleFeatureFlags [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> KqpResultSetFormats::ArrowFormat_Types_Optional_5 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> Cdc::InitialScan [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> TExportToS3Tests::EncryptedExport [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 14452, MsgBus: 6442 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00094f/r3tmp/tmp8Vvz7y/pdisk_1.dat 2025-10-24T16:24:26.151959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:26.152027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 14452, node 1 2025-10-24T16:24:26.168901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.168945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.184277Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:26.194252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.194269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.194270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.194308Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:26.204598Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:26.210773Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824969141216582:2081] 1761323066057409 != 1761323066057412 TClient is connected to server localhost:6442 TClient is connected to server localhost:6442 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:24:26.378812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:26.484533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:26.491276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:26.504621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:26.539821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:26.917340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:27.091102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:27.113210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:30.077554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986321087449:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.077601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.077808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986321087459:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.077819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.480009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.527749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.585056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.655800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.701420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.734855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.748040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.806012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.942644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824986321088326:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.942667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.942809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986321088331:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.942816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986321088332:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.942864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.636840Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.636976Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825026083548924:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.636983Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.653151Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.677666Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.695121Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.714125Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.739088Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.753984Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.772852Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.792496Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.821789Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825026083549785:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.821818Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.821904Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825026083549790:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.821911Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825026083549791:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.821926Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:39.822881Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:39.829140Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:24:39.829231Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825026083549794:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:39.889039Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825026083549846:3549] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:40.084171Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:40.368244Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-10-24T16:24:40.370120Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715673, at schemeshard: 72057594046644480 2025-10-24T16:24:40.387939Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-10-24T16:24:40.389973Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715674, at schemeshard: 72057594046644480 2025-10-24T16:24:40.405469Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] |55.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |55.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 23170, MsgBus: 19055 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000951/r3tmp/tmpCSZ9NG/pdisk_1.dat 2025-10-24T16:24:26.117075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:26.117132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:26.117471Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:26.120474Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824970396048121:2081] 1761323066045673 != 1761323066045676 TServer::EnableGrpc on GrpcPort 23170, node 1 2025-10-24T16:24:26.144177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.144191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.144193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.144239Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:26.168126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.168156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.174800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19055 TClient is connected to server localhost:19055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:24:26.386425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:26.396620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:26.421297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:26.563150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:26.831297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:26.857977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:27.341263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:29.187076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824983280951684:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.187170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.187628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824983280951694:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.187636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.556729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.664767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.750037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.792037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.833056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.921399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.035825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.162161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.254412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824987575919857:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.254434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.254540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824987575919863:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.254545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824987575919862:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.254560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.255398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation p ... peration type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.534350Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:38.731534Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825018946402447:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.731553Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.731700Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825018946402457:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.731706Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.751484Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.760447Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.769644Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.787650Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.799628Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.812817Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.826497Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.898080Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:38.929752Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825018946403326:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.929774Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.929870Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825018946403331:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.929879Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825018946403332:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.929936Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:38.931029Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:38.935729Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825018946403335:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:39.019702Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825023241370683:3554] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:39.272002Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:39.644435Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.745162Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:39.826022Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:39.915254Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:40.042355Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:40.134573Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:40.226861Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:40.254560Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-10-24T16:24:40.910818Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715712:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 |55.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |55.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropStream >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> test.py::test[schema-yamred_dsv_select_from_dict--Results] [GOOD] >> test.py::test[select-anon_clash--Results] >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase |55.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:35.761314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:35.761351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:35.761358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:35.761366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:35.761372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:35.761377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:35.761386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:35.761400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:35.761515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:35.761581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:35.992534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:35.992563Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:36.009482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:36.009525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:36.009564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:36.010640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:36.010756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:36.010850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:36.010912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:36.011600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:36.011637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:36.011843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:36.011865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-10-24T16:24:36.011888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:36.011896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:36.011901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:36.011922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.014477Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:36.361999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:36.362096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.362161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:36.362171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:36.373151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:36.373201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:36.392325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:36.392388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:36.392472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.392488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:36.392495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards 
to create, do next state 2025-10-24T16:24:36.392502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:36.400125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.400149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:36.400157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:36.408117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.408137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.408144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:36.408154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:36.412217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:36.420380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:36.421743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:36.421958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:36.421994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:36.422003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:36.422074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:36.422082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:36.422113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:36.422124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:36.428341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:36.428355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... eshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-10-24T16:24:41.915257Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-10-24T16:24:41.915297Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-10-24T16:24:41.915504Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:41.915534Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 21474838638 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:41.915542Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-10-24T16:24:41.915568Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:24:41.915579Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:24:41.915584Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:41.915590Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:24:41.915594Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:41.915603Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:24:41.915614Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:24:41.915621Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-10-24T16:24:41.915629Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:24:41.915634Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-10-24T16:24:41.915639Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710758:0 2025-10-24T16:24:41.915650Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:24:41.915657Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-10-24T16:24:41.915663Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-10-24T16:24:41.915667Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-10-24T16:24:41.916016Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.916042Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.916396Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:41.916409Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:41.916451Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-10-24T16:24:41.916480Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:41.916487Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2214], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-10-24T16:24:41.916493Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2214], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-10-24T16:24:41.916673Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.916690Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.916696Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:24:41.916702Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-10-24T16:24:41.916708Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:24:41.916796Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.916810Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.916815Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:24:41.916820Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-10-24T16:24:41.916825Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-10-24T16:24:41.916837Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-10-24T16:24:41.916843Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:139:2161] 2025-10-24T16:24:41.916903Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:24:41.916910Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-10-24T16:24:41.916921Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:24:41.917456Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.917726Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-10-24T16:24:41.917754Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-10-24T16:24:41.917766Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710758 2025-10-24T16:24:41.917774Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:24:41.917780Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-10-24T16:24:41.917786Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-10-24T16:24:41.917851Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:24:41.918203Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-10-24T16:24:41.918266Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:24:41.918274Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:24:41.918357Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:24:41.918375Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:24:41.918381Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [5:555:2513] TestWaitNotification: OK eventTxId 103 |55.5%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |55.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:34.156024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:34.156067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:34.156073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:34.156083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:34.156091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:34.156096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:34.156107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:34.156123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:34.156257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:34.156328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:34.181078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:34.181102Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:34.187346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:34.187386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:34.187429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:34.193049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-10-24T16:24:34.193216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:34.193344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:34.193423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:34.194268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:34.194312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:34.194556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:34.194566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:34.194593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:34.194601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:34.194606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:34.194632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.196006Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:34.288105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:34.288209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.288268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:34.288276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:34.290965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:34.290993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:34.296401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:34.296466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:34.296547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.296563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:34.296570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:34.296578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:34.297618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.297631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:34.297636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:34.298275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.298285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.298292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:34.298300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:34.301459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:34.308744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:34.309932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-10-24T16:24:34.310159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:34.310191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:34.310199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:34.310275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:34.310283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:34.310319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:34.310331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:34.311329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:34.311337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
94046678944, LocalPathId: 1], version: 13 2025-10-24T16:24:41.582901Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-10-24T16:24:41.582997Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:24:41.583008Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:24:41.583013Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-10-24T16:24:41.583018Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-10-24T16:24:41.583023Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-10-24T16:24:41.583033Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-10-24T16:24:41.583039Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:129:2153] 2025-10-24T16:24:41.592218Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-10-24T16:24:41.592364Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-10-24T16:24:41.592392Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:24:41.592422Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:24:41.592443Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:24:41.592450Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:24:41.592458Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-10-24T16:24:41.593242Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:24:41.593269Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:24:41.593279Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [5:619:2554] 
TestWaitNotification: OK eventTxId 103 2025-10-24T16:24:41.593537Z node 5 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-10-24T16:24:41.599006Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:24:41.599033Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:736: TExport::TTxProgress: Resume: id# 103 2025-10-24T16:24:41.599055Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:534: TExport::TTxProgress: Allocate txId: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 } 2025-10-24T16:24:41.599073Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:24:41.599104Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 103, at schemeshard: 72057594046678944 2025-10-24T16:24:41.599111Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:24:41.599118Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:856: TExport::TTxProgress: OnAllocateResult: txId# 281474976710764, id# 103 2025-10-24T16:24:41.599128Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:526: TExport::TTxProgress: Drop propose: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710764 2025-10-24T16:24:41.599151Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:24:41.600063Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-103" } Internal: true } TxId: 281474976710764 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-10-24T16:24:41.600238Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-103, pathId: 0, opId: 281474976710764:0, at schemeshard: 72057594046678944 2025-10-24T16:24:41.600289Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710764:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2025-10-24T16:24:41.604259Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710764, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: 
[OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763, at schemeshard: 72057594046678944 2025-10-24T16:24:41.604342Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710764, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-103', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/export-103 2025-10-24T16:24:41.604404Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-10-24T16:24:41.604417Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-10-24T16:24:41.604434Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:24:41.604440Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:917: TExport::TTxProgress: OnModifyResult: txId# 281474976710764, status# StatusPathDoesNotExist 2025-10-24T16:24:41.604449Z node 5 :EXPORT TRACE: schemeshard_export__create.cpp:918: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-103\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 4], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710763, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710764 SchemeshardId: 72057594046678944 PathId: 4 PathDropTxId: 281474976710763 2025-10-24T16:24:41.604501Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:1099: TExport::TTxProgress: Wait for completion: info# { Id: 103 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 4] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710763 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2025-10-24T16:24:41.609266Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:24:41.609335Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710763, at schemeshard: 72057594046678944 2025-10-24T16:24:41.609370Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:24:41.609394Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:24:41.609401Z node 5 :EXPORT DEBUG: 
schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:24:41.609406Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:24:41.609413Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-10-24T16:24:41.616316Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-10-24T16:24:41.616431Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:24:41.616443Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:24:41.616560Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:24:41.616597Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:24:41.616604Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [5:1276:3152] TestWaitNotification: OK eventTxId 103 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> test.py::test[action-eval_each_input_table-default.txt-Results] [GOOD] >> test.py::test[action-eval_filter--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> KqpOlapJson::EmptyVariants[2,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,10,1000000,0.5] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> test.py::test[select-anon_clash--Results] [GOOD] >> test.py::test[select-corr_name_in_select-default.txt-Results] >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |55.6%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |55.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> Cdc::ShouldBreakLocksOnConcurrentDropStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge |55.6%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-14581_fuseflatmaps_with_external_lambda--Results] [SKIPPED] >> test.py::test[optimizers-yql-20264-default.txt-Results] |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> KqpOlapJson::EmptyVariants[2,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: Trying to start YDB, gRPC: 19583, MsgBus: 8613 2025-10-24T16:24:25.971596Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824965399483921:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:25.971769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014d2/r3tmp/tmpOACzdD/pdisk_1.dat 2025-10-24T16:24:26.095651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:26.102579Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:26.103244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.103268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.104374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19583, node 1 2025-10-24T16:24:26.118066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-10-24T16:24:26.118077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.118079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.118114Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8613 TClient is connected to server localhost:8613 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:24:26.257986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:26.359773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:26.379980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:26.394787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:26.632976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:24:26.907739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:27.007954Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:27.070237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:30.263940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986874322029:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.263973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.268028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986874322039:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.268046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.596748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.612633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.622749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.654014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.694518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.725275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.752996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.843607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.904227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824986874322902:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.904256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.904438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986874322907:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.904447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824986874322908:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.904511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, i ... to server localhost:18827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:43.762706Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:43.764912Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:44.151386Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825045001524620:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:44.151416Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:44.151563Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825045001524655:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:44.151570Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825045001524656:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:44.151581Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:44.152476Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:44.155514Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:24:44.155607Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564825045001524659:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:24:44.246478Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564825045001524710:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=1; Trying to start YDB, gRPC: 5235, MsgBus: 9607 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014d2/r3tmp/tmpElTHH8/pdisk_1.dat 2025-10-24T16:24:44.664051Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:44.664097Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:44.684318Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:44.684352Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:44.685188Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:44.687192Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [13:7564825044646913910:2081] 1761323084641222 != 1761323084641225 2025-10-24T16:24:44.689920Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5235, node 13 2025-10-24T16:24:44.700663Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:44.700673Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:44.700675Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:44.700725Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9607 TClient is connected to server localhost:9607 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:44.836875Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:44.840698Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:44.940976Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:45.171557Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825048941881892:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.171576Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825048941881860:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.171636Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.172474Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825048941881898:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.172496Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.172652Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:45.176231Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:24:45.176349Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7564825048941881897:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:24:45.273098Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7564825048941881950:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; >> Worker::Basic >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanDebezium >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> test.py::test[insert-udf_empty--Results] [GOOD] >> test.py::test[insert-yql-14538--Results] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] |55.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: Trying to start YDB, gRPC: 7523, MsgBus: 28345 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014cd/r3tmp/tmp7eXCQG/pdisk_1.dat 2025-10-24T16:24:26.001654Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824963128959939:2271];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:26.001747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:26.123948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.123983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.127141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:26.132296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:26.133736Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:26.138402Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824963128959680:2081] 1761323065995602 != 1761323065995605 TServer::EnableGrpc on GrpcPort 7523, node 1 2025-10-24T16:24:26.166634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.166648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.166651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.166718Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28345 
2025-10-24T16:24:26.288545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:26.548064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:26.580285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:26.605861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:26.845688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:27.052595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:27.158903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:27.252219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.181435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824980308830540:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.181544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.181713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824980308830550:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.181724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.337316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.395672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.441294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.484480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.562706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.632673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.645313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.736763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.847588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824980308831415:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.847640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.847792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824980308831418:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.847803Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.847904Z node 1 ... oot TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:44.888516Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:44.896648Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:44.901837Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:45.230411Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825051170160325:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.230437Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.230551Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825051170160337:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.230557Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825051170160338:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.230576Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:45.231494Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:45.234348Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564825051170160341:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:24:45.295800Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564825051170160392:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; Trying to start YDB, gRPC: 62209, MsgBus: 7280 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014cd/r3tmp/tmp5FI3XK/pdisk_1.dat 2025-10-24T16:24:45.685431Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:45.691913Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:45.692276Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:45.692680Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:45.692699Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:45.696007Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [13:7564825051056596610:2081] 1761323085659775 != 1761323085659778 2025-10-24T16:24:45.696805Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62209, node 13 2025-10-24T16:24:45.710282Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:45.710295Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:45.710298Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:45.710348Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7280 TClient is connected to server localhost:7280 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:24:45.820748Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:45.823623Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:45.924962Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:46.261944Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825055351564559:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.261969Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.267945Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825055351564593:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.267953Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825055351564595:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.267989Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.269128Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:46.272526Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:24:46.272639Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7564825055351564598:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:24:46.354905Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7564825055351564649:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: 2025-10-24T16:24:41.673236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:41.841167Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:41.841255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:41.841266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001795/r3tmp/tmpSPoybt/pdisk_1.dat 2025-10-24T16:24:42.080993Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:42.082251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:42.082290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:42.082439Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323081071757 != 1761323081071760 2025-10-24T16:24:42.117029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:42.204724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:42.243706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:42.343734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:42.383333Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:24:42.385192Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:24:42.469928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:24:42.469967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:24:42.470443Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:24:42.470452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:24:42.470459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:24:42.470529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:24:42.470551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:24:42.470562Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:24:42.480833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:24:42.517346Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:24:42.517451Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:24:42.517497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:24:42.517502Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:42.517508Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:24:42.517515Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.517724Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:24:42.517758Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:24:42.517781Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.517789Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:42.517800Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:42.517805Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.517817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:24:42.517947Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:24:42.518017Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:24:42.518038Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:24:42.519909Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:42.532338Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:24:42.532383Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:24:42.693909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:24:42.695808Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:24:42.695830Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.695913Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.695922Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:24:42.695934Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:24:42.696296Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:24:42.696342Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:24:42.696702Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.696723Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:24:42.698619Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:24:42.698997Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:42.699980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:24:42.699991Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.700035Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:24:42.700047Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.700368Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.700377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:42.700384Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:24:42.700401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:42.700410Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:24:42.700422Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.710763Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:42.711667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:24:42.711789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:24:42.711798Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:24:42.716565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7 ... immediate 0 planned 1 2025-10-24T16:24:46.250548Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.250560Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:24:46.250645Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:24:46.251434Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:46.251963Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:24:46.251971Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.252066Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:24:46.252076Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.252243Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.252250Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:46.252255Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:24:46.252269Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:46.252278Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-10-24T16:24:46.252286Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.252721Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:46.252941Z node 2 :TX_DATASHARD 
DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:24:46.252969Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-10-24T16:24:46.252975Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:24:46.265217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.265246Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:751:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.265256Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.265396Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.265412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.266973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:46.269473Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:46.320042Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:46.433496Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:46.433975Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:24:46.473149Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:827:2660] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:46.530114Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710660. Ctx: { TraceId: 01k8bgjwdraw43tt5fnqfgs4wj, Database: , SessionId: ydb://session/3?node_id=2&id=ZTJhZWQ3M2UtZWFmMmM0MzQtOWVmMzgzZGYtNWU5OTVhNjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:24:46.539701Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:858:2677], serverId# [2:859:2678], sessionId# [0:0:0] 2025-10-24T16:24:46.539862Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:24:46.539913Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-10-24T16:24:46.550375Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.555236Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:866:2684], serverId# [2:867:2685], sessionId# [0:0:0] 2025-10-24T16:24:46.560037Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:24:46.572079Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:24:46.572110Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.572186Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:24:46.572199Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-10-24T16:24:46.572276Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.572286Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:46.572294Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:46.572308Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.572325Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:866:2684], serverId# [2:867:2685], sessionId# [0:0:0] 2025-10-24T16:24:46.572704Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:24:46.572787Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:24:46.572822Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.572827Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:24:46.572834Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 2025-10-24T16:24:46.573009Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:24:46.573018Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.573395Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 1 2025-10-24T16:24:46.573598Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-10-24T16:24:46.573621Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710661, PendingAcks: 0 2025-10-24T16:24:46.573627Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 0 2025-10-24T16:24:46.573689Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:24:46.573695Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710661, at: 72075186224037888 2025-10-24T16:24:46.573713Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.573717Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:24:46.573722Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for ReadTableScan 2025-10-24T16:24:46.573746Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:46.573752Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.573758Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut >> test_secrets_usage.py::test_create_eds_with_single_secret_with_success |55.6%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |55.6%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: Build = 
3.730004256 Merge = 13.75636682 less-than assertion failed at ydb/core/viewer/viewer_ut.cpp:158, virtual void NTestSuiteViewer::TTestCaseTabletMergingPacked::Execute_(NUnitTest::TTestContext &): timer.Passed() < 3 * BASE_PERF TBackTrace::Capture()+28 (0x14CC0D0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+128 (0x14E7BFA0) NTestSuiteViewer::TTestCaseTabletMergingPacked::Execute_(NUnitTest::TTestContext&)+1938 (0x14B29262) NTestSuiteViewer::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x14B74427) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0x14E7E178) NTestSuiteViewer::TCurrentTest::Execute()+421 (0x14B73D25) NUnitTest::TTestFactory::Execute()+817 (0x14E7E8C1) NUnitTest::RunMain(int, char**)+3229 (0x14E90D5D) ??+0 (0x7F58DD206D90) __libc_start_main+128 (0x7F58DD206E40) _start+41 (0x13CB2029) Data has built Merge = 0.08304711334 Data has merged 2025-10-24T16:23:45.163686Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.163778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.166529Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.166672Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.166719Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.211234Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.211262Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.212212Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1581:2421], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.212414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.212559Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.212672Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1585:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.212792Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.212801Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.213198Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1573:2253], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.213280Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1577:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.213331Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.213375Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.213386Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.213420Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.213506Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1589:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.213557Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.213585Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:45.343881Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:45.405006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.405045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.405178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.405187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.405208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.405216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.405258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.405264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.405293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.405303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.481058Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:45.481173Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-10-24T16:23:45.481257Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:23:45.481312Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-10-24T16:23:45.481408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.481583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.481610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.481634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:45.481659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-10-24T16:23:45.568039Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:23:45.571743Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:23:45.615028Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 4872, node 1 TClient is connected to server localhost:22563 2025-10-24T16:23:45.655486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:45.655512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:45.655517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:45.655731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration Request timer = 44.26486418 BASE_PERF = 1.678712481 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:32.687468Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7564824995460672727:2254];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:32.687510Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:32.708784Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:32.758092Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:32.758495Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [6:7564824995460672486:2081] 1761323072684800 != 1761323072684803 TServer::EnableGrpc on GrpcPort 26906, node 6 2025-10-24T16:24:32.776306Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:32.776319Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:32.776321Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:2 ... 
d25-6b7c0219_0] Write session got write response: sequence_numbers: 20 offsets: 59 already_written: false write_statistics { persist_duration_ms: 6 queued_in_partition_duration_ms: 1 } 2025-10-24T16:24:35.572846Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|eb40fbeb-fbaa953c-9f7eed25-6b7c0219_0] Write session: acknoledged message 20 2025-10-24T16:24:35.656126Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|eb40fbeb-fbaa953c-9f7eed25-6b7c0219_0] Write session will now close 2025-10-24T16:24:35.656150Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|eb40fbeb-fbaa953c-9f7eed25-6b7c0219_0] Write session: aborting 2025-10-24T16:24:35.656319Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|eb40fbeb-fbaa953c-9f7eed25-6b7c0219_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:24:35.656334Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|eb40fbeb-fbaa953c-9f7eed25-6b7c0219_0] Write session: destroy 2025-10-24T16:24:35.930411Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825006626058157:2381], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:35.931289Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=YTlkNDYzMi1lOTVkMTdhMy0xZjNiZTY4Ni1iNWQxM2JkMw==, ActorId: [7:7564825006626058150:2377], ActorState: ExecuteState, TraceId: 01k8bgjjam3gzvxfr77293f0sc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:35.931453Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:36.936711Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825010921025467:2387], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:36.937224Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=NTYzYTZkNzktYTIyMzViNmUtNGE0NTIxMjgtYzVlOWViMzU=, ActorId: [7:7564825010921025465:2386], ActorState: ExecuteState, TraceId: 01k8bgjka4ac9gbm9w8swtvw5k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:36.937333Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:37.945727Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825015215992777:2393], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:37.946331Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=Y2Y2NjdkMjEtMTA0MThjZjItZWIyNzg5ODMtMjM4YTQ0ZTk=, ActorId: [7:7564825015215992775:2392], ActorState: ExecuteState, TraceId: 01k8bgjm9m4zqpd6jwdmasv181, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:37.946461Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:38.957515Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825019510960088:2399], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:38.958214Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=MTQ3NDE4NWItYWYzZGI1MTItYWYwNmU1NTQtZDE3ODZiMDk=, ActorId: [7:7564825019510960086:2398], ActorState: ExecuteState, TraceId: 01k8bgjn98f1jef65x9s4bq9r6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:38.958330Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-10-24T16:24:39.733552Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-10-24T16:24:39.733747Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-10-24T16:24:39.733755Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:13549 2025-10-24T16:24:39.736491Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-10-24T16:24:39.741713Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1761323079741 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:39.741769Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. Init response: session_id: "producer4|a797af03-3c696dc2-5a03085a-386b264f_0" topic: "topic1" 2025-10-24T16:24:39.752988Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write 1 messages with Id from 1 to 1 2025-10-24T16:24:39.753058Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session: close. 
Timeout = 18446744073709551 ms 2025-10-24T16:24:39.768636Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session: try to update token 2025-10-24T16:24:39.768664Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Send 1 message(s) (0 left), first sequence number is 1 2025-10-24T16:24:39.779972Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { persist_duration_ms: 1 } 2025-10-24T16:24:39.779997Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session: acknoledged message 1 2025-10-24T16:24:39.853166Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session will now close 2025-10-24T16:24:39.853194Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session: aborting 2025-10-24T16:24:39.853427Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:24:39.853454Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|a797af03-3c696dc2-5a03085a-386b264f_0] Write session: destroy 2025-10-24T16:24:39.965559Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825023805927423:2412], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:39.966158Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=OGNlZjlkMDUtNWU3OTIwNTctMjZlMWEzOTQtMjc0NTJlZTg=, ActorId: [7:7564825023805927421:2411], ActorState: ExecuteState, TraceId: 01k8bgjp8r85rdw4ty2bbpxy1v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:39.966317Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic Got response:400: Bad offset 2025-10-24T16:24:40.060995Z node 7 :PERSQUEUE ERROR: partition_read.cpp:823: [72075186224037889][Partition][0][StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 2025-10-24T16:24:40.061019Z node 7 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic1' partition: 0 messageNo: 0 requestId: error: trying to read from future. ReadOffset 10000, 0 EndOffset 61 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2025-10-24T16:24:41.728126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:41.766034Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:41.766124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:41.766135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001798/r3tmp/tmpumGeBR/pdisk_1.dat 2025-10-24T16:24:41.860108Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:41.861161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:41.861199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:41.861315Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323081194401 != 1761323081194404 2025-10-24T16:24:41.895332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:41.984403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:42.058262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:42.159796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:42.208260Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:24:42.208374Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:24:42.262649Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:24:42.262703Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:24:42.263205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:24:42.263219Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:24:42.263227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:24:42.263330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:24:42.263375Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:24:42.263392Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:24:42.275909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:24:42.299240Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:24:42.299361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:24:42.299403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:24:42.299409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:42.299414Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:24:42.299421Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.299629Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:24:42.299666Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:24:42.299684Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.299693Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:42.299704Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:42.299709Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.299722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:24:42.299842Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:24:42.299925Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:24:42.299947Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:24:42.301632Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:42.311998Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:24:42.312047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:24:42.460769Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:24:42.462608Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:24:42.462628Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.462701Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.462711Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:24:42.462724Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:24:42.463122Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:24:42.463159Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:24:42.463508Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.463530Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:24:42.469695Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:24:42.470116Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:42.471149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:24:42.471162Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.471216Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:24:42.471229Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.472680Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.472695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:42.472703Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:24:42.472723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:42.472737Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:24:42.472754Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.482947Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:42.483596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:24:42.483747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:24:42.483760Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:24:42.488267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7 ... immediate 0 planned 1 2025-10-24T16:24:46.597308Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.597321Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:24:46.597409Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:24:46.597729Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:46.598041Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:24:46.598049Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.598174Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:24:46.598183Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.598396Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:46.598415Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.598422Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:46.598427Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:24:46.598443Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:46.598452Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:24:46.598462Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.598946Z node 3 :TX_DATASHARD 
DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:24:46.598958Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:24:46.599036Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:24:46.612062Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.612088Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:751:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.612099Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.612372Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.612386Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:46.613484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:46.615104Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:46.666509Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:46.785366Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:46.785859Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:24:46.824963Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:827:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:46.874083Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgjwrk4sawc1m490ej2k2p, Database: , SessionId: ydb://session/3?node_id=3&id=ZGZjZGU0OWEtMjBhZGYzODMtNzc0ZTk1ZjgtNmQ4OTkyZjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:24:46.874632Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:858:2677], serverId# [3:859:2678], sessionId# [0:0:0] 2025-10-24T16:24:46.874746Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:24:46.874791Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-10-24T16:24:46.886366Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.887475Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:866:2684], serverId# [3:867:2685], sessionId# [0:0:0] 2025-10-24T16:24:46.887695Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:24:46.899506Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:24:46.899535Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:46.899603Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:24:46.899611Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-10-24T16:24:46.899681Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.899689Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:46.899698Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:46.899711Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.899728Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:866:2684], serverId# [3:867:2685], sessionId# [0:0:0] 2025-10-24T16:24:46.899965Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:24:46.900052Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:24:46.900088Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.900094Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:24:46.900102Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-10-24T16:24:46.900144Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:24:46.900152Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.900386Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-10-24T16:24:46.900598Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-10-24T16:24:46.900619Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-10-24T16:24:46.900625Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-10-24T16:24:46.900703Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:24:46.900709Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-10-24T16:24:46.900729Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:46.900733Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:24:46.900738Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-10-24T16:24:46.900763Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:46.900771Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:46.900777Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> test_auditlog.py::test_create_and_remove_tenant >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::EmptyVariants[2,true,1,1000,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: 2025-10-24T16:24:41.798478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:41.835802Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:41.835910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:41.835923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00179a/r3tmp/tmpPhyDUP/pdisk_1.dat 2025-10-24T16:24:42.023189Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:42.027469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:42.027765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:42.027894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323081250492 != 1761323081250495 2025-10-24T16:24:42.064293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:42.142385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:42.200909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:42.290115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:42.337994Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:697:2582] 2025-10-24T16:24:42.338874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:24:42.419936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:24:42.419989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:24:42.420160Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:24:42.420173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:24:42.420180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:24:42.420253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:24:42.420645Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:24:42.420666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:731:2582] in generation 1 2025-10-24T16:24:42.420785Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:701:2585] 2025-10-24T16:24:42.420819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:24:42.437505Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:704:2588] 2025-10-24T16:24:42.437550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:24:42.450222Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:24:42.450261Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:24:42.450694Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:24:42.450703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:24:42.450709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:24:42.450753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:24:42.450780Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:24:42.450790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:744:2585] in generation 1 2025-10-24T16:24:42.450851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:24:42.450863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:24:42.450949Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-10-24T16:24:42.450955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-10-24T16:24:42.450960Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-10-24T16:24:42.450983Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:24:42.450992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:24:42.450998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:745:2588] in generation 1 2025-10-24T16:24:42.464249Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:24:42.538425Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:24:42.540153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:24:42.540211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:749:2613] 2025-10-24T16:24:42.540217Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:42.540223Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change 
sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:24:42.540228Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:42.546594Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:24:42.546617Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:24:42.546648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:24:42.546670Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:750:2614] 2025-10-24T16:24:42.546675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:24:42.546679Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:24:42.546683Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:24:42.547498Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:24:42.547508Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-10-24T16:24:42.547520Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:24:42.547530Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:751:2615] 2025-10-24T16:24:42.547533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-10-24T16:24:42.547536Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-10-24T16:24:42.547540Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:24:42.547608Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:24:42.547635Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:24:42.547662Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:42.547668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:42.547677Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:42.547681Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:42.547686Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:24:42.547697Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:24:42.547718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server 
connected at leader tablet# 72075186224037888, clientId# [1:687:2577], serverId# [1:699:2583], sessionId# [0:0:0] 2025-10-24T16:24:42.547723Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:24:42.547728Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:42.550485Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:24:42.550505Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:24:42.550514Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2025-10-24T16:24:42.550532Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxChec ... 75186224037888 2025-10-24T16:24:47.501723Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:24:47.501736Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:47.501743Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:24:47.501751Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:47.502074Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:24:47.502085Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:24:47.502153Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:24:47.503511Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.503526Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:751:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.503534Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.503657Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.503666Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.504751Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:47.507570Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:47.564354Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:47.673541Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:47.674122Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:24:47.712991Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:827:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:47.775291Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k8bgjxmf61daw9j91yej4sc6, Database: , SessionId: ydb://session/3?node_id=3&id=Nzk0OWJkODYtODIwODMyNWUtYzU1NmZjZGYtNjQ4NTk0Mzc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:24:47.783949Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:858:2677], serverId# [3:859:2678], sessionId# [0:0:0] 2025-10-24T16:24:47.784136Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:24:47.784181Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-10-24T16:24:47.796085Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:47.839092Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgjxxrfwabktej5wvxpgzx, Database: , SessionId: ydb://session/3?node_id=3&id=OTgyZjRiZjAtMTY1OTAyZTQtZjU1ZGM3Y2MtOGM2NTBjM2Q=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:24:47.839810Z node 3 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-10-24T16:24:47.841590Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:898:2709], serverId# [3:899:2710], sessionId# [0:0:0] 2025-10-24T16:24:47.841875Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:24:47.852900Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:24:47.852935Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:47.852951Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 1501 from mediator time cast 2025-10-24T16:24:47.853153Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-10-24T16:24:47.853161Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:47.853230Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:24:47.853239Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-10-24T16:24:47.853311Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:47.853322Z node 3 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:47.853333Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:47.853343Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:47.853366Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:898:2709], serverId# [3:899:2710], sessionId# [0:0:0] 2025-10-24T16:24:47.864699Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgjxzdb45x8kvt1a5cvpgt, Database: , SessionId: ydb://session/3?node_id=3&id=OTgyZjRiZjAtMTY1OTAyZTQtZjU1ZGM3Y2MtOGM2NTBjM2Q=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:24:47.865392Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:6] at 72075186224037888 2025-10-24T16:24:47.865456Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-10-24T16:24:47.866982Z node 3 :TX_DATASHARD INFO: datashard_write_operation.cpp:800: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-10-24T16:24:47.867058Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-10-24T16:24:47.867107Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-10-24T16:24:47.867125Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:47.867196Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:911: SelfId: [3:920:2683], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:865:2683]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:920:2683].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-10-24T16:24:47.867363Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [3:913:2683], SessionActorId: [3:865:2683], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:865:2683]. 2025-10-24T16:24:47.867471Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=3&id=OTgyZjRiZjAtMTY1OTAyZTQtZjU1ZGM3Y2MtOGM2NTBjM2Q=, ActorId: [3:865:2683], ActorState: ExecuteState, TraceId: 01k8bgjxzdb45x8kvt1a5cvpgt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:914:2683] from: [3:913:2683] 2025-10-24T16:24:47.867521Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [3:914:2683] TxId: 281474976715662. Ctx: { TraceId: 01k8bgjxzdb45x8kvt1a5cvpgt, Database: , SessionId: ydb://session/3?node_id=3&id=OTgyZjRiZjAtMTY1OTAyZTQtZjU1ZGM3Y2MtOGM2NTBjM2Q=, PoolId: default, DatabaseId: /Root}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-10-24T16:24:47.867613Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:7] at 72075186224037888 2025-10-24T16:24:47.867625Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:461: Skip empty write operation for [0:7] at 72075186224037888 2025-10-24T16:24:47.867657Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:47.867684Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=3&id=OTgyZjRiZjAtMTY1OTAyZTQtZjU1ZGM3Y2MtOGM2NTBjM2Q=, ActorId: [3:865:2683], ActorState: ExecuteState, TraceId: 01k8bgjxzdb45x8kvt1a5cvpgt, Create QueryResponse for error on request, msg: >> BsControllerConfig::MergeBoxes [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test.py::test[select-corr_name_in_select-default.txt-Results] [GOOD] |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> test.py::test[select-dict_with_few_keys-default.txt-Results] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> KqpResultSetFormats::ArrowFormat_Returning+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10913:2167] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10913:2167] Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11117:2156] recipient: [1:10913:2167] 2025-10-24T16:23:39.461806Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:23:39.462165Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:23:39.462270Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:23:39.462758Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:23:39.462864Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:23:39.462989Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.462996Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:23:39.463094Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:23:39.464431Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:23:39.464470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:23:39.464504Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:23:39.464528Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.464541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:23:39.464552Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11138:2156] recipient: [1:110:2157] 2025-10-24T16:23:39.476142Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:23:39.476214Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.512171Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:23:39.512236Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.512256Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:23:39.512276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.512309Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:23:39.512320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.512326Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:23:39.512340Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.522790Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:23:39.522864Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.533741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:23:39.533831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:23:39.534111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:23:39.534122Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:23:39.534175Z node 1 :BS_CONTROLLER DEBUG: 
{BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:23:39.534192Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:23:39.540136Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 
12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... 
OTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 275:1000 Path# /dev/disk1 2025-10-24T16:24:40.827004Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 275:1001 Path# /dev/disk2 2025-10-24T16:24:40.827009Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-10-24T16:24:40.827015Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-10-24T16:24:40.827023Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-10-24T16:24:40.827029Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-10-24T16:24:40.827034Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-10-24T16:24:40.827040Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-10-24T16:24:40.827045Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-10-24T16:24:40.827050Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-10-24T16:24:40.827055Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-10-24T16:24:40.827061Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-10-24T16:24:40.827066Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-10-24T16:24:40.827072Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-10-24T16:24:40.827077Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-10-24T16:24:40.827081Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-10-24T16:24:40.827086Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-10-24T16:24:40.827090Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-10-24T16:24:40.827095Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-10-24T16:24:40.827099Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-10-24T16:24:40.827104Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-10-24T16:24:40.827108Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-10-24T16:24:40.827113Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-10-24T16:24:40.827118Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-10-24T16:24:40.827122Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-10-24T16:24:40.827126Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-10-24T16:24:40.827131Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-10-24T16:24:40.827135Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-10-24T16:24:40.827141Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-10-24T16:24:40.827145Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-10-24T16:24:40.827151Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-10-24T16:24:40.827156Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-10-24T16:24:40.827162Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-10-24T16:24:40.827167Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-10-24T16:24:40.827173Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-10-24T16:24:40.827179Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-10-24T16:24:40.827185Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-10-24T16:24:40.827191Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-10-24T16:24:40.827197Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-10-24T16:24:40.827201Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-10-24T16:24:40.827206Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-10-24T16:24:40.827210Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-10-24T16:24:40.827214Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-10-24T16:24:40.827219Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-10-24T16:24:40.827224Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-10-24T16:24:40.827228Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-10-24T16:24:40.827232Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-10-24T16:24:40.827237Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-10-24T16:24:40.827241Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-10-24T16:24:40.827246Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-10-24T16:24:40.827252Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-10-24T16:24:40.827257Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-10-24T16:24:40.827263Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-10-24T16:24:40.827279Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-10-24T16:24:40.827285Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-10-24T16:24:40.827291Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-10-24T16:24:40.827295Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-10-24T16:24:40.827300Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-10-24T16:24:40.827305Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-10-24T16:24:40.827309Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-10-24T16:24:40.827314Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-10-24T16:24:40.827318Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-10-24T16:24:40.827323Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-10-24T16:24:40.827327Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-10-24T16:24:40.827332Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-10-24T16:24:40.827336Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-10-24T16:24:40.827342Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-10-24T16:24:40.827347Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-10-24T16:24:40.827354Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-10-24T16:24:40.827359Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-10-24T16:24:40.827364Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-10-24T16:24:40.827369Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-10-24T16:24:40.827375Z node 251 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-10-24T16:24:40.827380Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-10-24T16:24:40.827385Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-10-24T16:24:40.827390Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-10-24T16:24:40.827394Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-10-24T16:24:40.827399Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-10-24T16:24:40.897819Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-10-24T16:24:40.913585Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> Worker::Basic [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> test.py::test[action-eval_filter--Results] [GOOD] >> TGroupMapperTest::Block42_1disk [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum |55.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut >> test.py::test[action-eval_unknown_cluster_name--Results] [SKIPPED] >> test.py::test[action-mixed_eval_typeof_world1--Results] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed |55.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |55.7%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut >> KqpBatchUpdate::SimplePartitions [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,0,0.5] [GOOD] >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,100,0] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> KqpResultSetFormats::ArrowFormat_ColumnOrder >> KqpScanArrowInChanels::JoinWithParams >> Cdc::InitialScanUpdatedRows >> BasicUsage::SimpleHandlers [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-10-24T16:24:46.576234Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825053448430394:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:46.576315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014f0/r3tmp/tmp426qeI/pdisk_1.dat 2025-10-24T16:24:46.640640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:46.640664Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:6977 TServer::EnableGrpc on GrpcPort 1170, node 1 2025-10-24T16:24:46.675160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:46.676152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:46.677343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:46.808893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:46.866268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:46.866280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:46.866291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:46.866350Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:47.157055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:47.166656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
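Every entry in the unittest output above follows the same layout: an ISO-8601 timestamp, a node id, a component tag (BS_CONTROLLER, REPLICATION_SERVICE, FLAT_TX_SCHEMESHARD, ...), a severity, a source location, and the message text. The snippet below is only an illustrative sketch for slicing such lines when digging through a run like this one; the regex and field names are assumptions about the format, not part of the YDB test harness.

import re

# Layout observed in the unittest output above:
#   <ISO timestamp> node <id> :<COMPONENT> <SEVERITY>: <location>: <message>
# Pattern and field names are illustrative assumptions, not a YDB API.
LOG_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node (?P<node>\d+)\s+"
    r":(?P<component>[A-Z_]+)\s+"
    r"(?P<severity>[A-Z]+):\s+"
    r"(?P<message>.*)"
)

def parse_line(line: str):
    """Return ts/node/component/severity/message fields, or None if no match."""
    m = LOG_RE.search(line)
    return m.groupdict() if m else None

sample = ("2025-10-24T16:24:47.217060Z node 1 :REPLICATION_SERVICE DEBUG: "
          "worker.cpp:157: [Worker][1:7564825057743398267:2421] Handle ...")
print(parse_line(sample)["component"])  # REPLICATION_SERVICE

Grouping parsed lines by component and severity is usually enough to separate the replication worker traffic from the schemeshard and workload-service noise in output like the run that continues below.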
2025-10-24T16:24:47.195964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1761323087251 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-10-24T16:24:47.217037Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handshake: worker# [1:7564825057743398267:2421] 2025-10-24T16:24:47.217060Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handshake: worker# [1:7564825057743398267:2421] 2025-10-24T16:24:47.217786Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Create read session: session# [1:7564825057743398270:2295] 2025-10-24T16:24:47.217810Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:157: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-10-24T16:24:47.217813Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:161: [Worker][1:7564825057743398267:2421] Handshake with reader: sender# [1:7564825057743398268:2421] 2025-10-24T16:24:47.217819Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:47.217889Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:312: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-10-24T16:24:47.217950Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:387: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: 
result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-10-24T16:24:47.217972Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:417: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Send handshake: worker# [1:7564825057743398267:2421] 2025-10-24T16:24:47.217981Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:157: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-10-24T16:24:47.217984Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:169: [Worker][1:7564825057743398267:2421] Handshake with writer: sender# [1:7564825057743398269:2421] 2025-10-24T16:24:47.221216Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_2853245306095410650_v1 } } 2025-10-24T16:24:47.575669Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:47.581226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:47.845912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825057743398435:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.847093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:47.847731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825057743398421:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.847778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:47.849132Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825057743398458:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:24:47.914798Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825057743398508:2522] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92 ... proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:24:48.532959Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-10-24T16:24:48.530000Z WriteTime: 2025-10-24T16:24:48.530000Z MessageGroupId: producer ProducerId: producer }] } } 2025-10-24T16:24:48.533008Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:226: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-10-24T16:24:48.530000Z WriteTime: 2025-10-24T16:24:48.530000Z MessageGroupId: producer ProducerId: producer }] } 2025-10-24T16:24:48.533018Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-10-24T16:24:48.530000Z WriteTime: 2025-10-24T16:24:48.530000Z MessageGroupId: producer ProducerId: producer }] } 2025-10-24T16:24:48.533053Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:565: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-10-24T16:24:48.533100Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-10-24T16:24:48.533111Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:596: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-10-24T16:24:48.533125Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-10-24T16:24:48.534323Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-10-24T16:24:48.534347Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:596: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-10-24T16:24:48.534357Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:579: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-10-24T16:24:48.534369Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:185: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:48.534380Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:48.705640Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-10-24T16:24:48.687000Z WriteTime: 2025-10-24T16:24:48.695000Z MessageGroupId: producer ProducerId: producer }] } } 2025-10-24T16:24:48.705665Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:226: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-10-24T16:24:48.687000Z WriteTime: 2025-10-24T16:24:48.695000Z MessageGroupId: producer ProducerId: producer }] } 2025-10-24T16:24:48.705677Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-10-24T16:24:48.687000Z WriteTime: 2025-10-24T16:24:48.695000Z MessageGroupId: producer ProducerId: producer }] } 2025-10-24T16:24:48.705710Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:565: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-10-24T16:24:48.705749Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-10-24T16:24:48.709614Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-10-24T16:24:48.709648Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:596: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-10-24T16:24:48.709662Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:579: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-10-24T16:24:48.709682Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:185: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:48.709698Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:48.824530Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-10-24T16:24:48.821000Z WriteTime: 2025-10-24T16:24:48.822000Z MessageGroupId: producer ProducerId: producer }] } } 2025-10-24T16:24:48.824553Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:226: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-10-24T16:24:48.821000Z WriteTime: 2025-10-24T16:24:48.822000Z MessageGroupId: producer ProducerId: producer }] } 2025-10-24T16:24:48.824563Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:431: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-10-24T16:24:48.821000Z WriteTime: 2025-10-24T16:24:48.822000Z MessageGroupId: producer ProducerId: producer }] } 2025-10-24T16:24:48.824593Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:565: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-10-24T16:24:48.824625Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-10-24T16:24:48.826226Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7564825062038366366:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-10-24T16:24:48.826246Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:596: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-10-24T16:24:48.826260Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:579: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7564825057743398269:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2025-10-24T16:24:48.826270Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:185: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:48.826280Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle 
NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-10-24T16:24:48.942846Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:125: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-10-24T16:24:48.942859Z node 1 :REPLICATION_SERVICE INFO: topic_reader.cpp:138: [RemoteTopicReader][/Root/topic][0][1:7564825057743398268:2421] Leave 2025-10-24T16:24:48.942876Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:263: [Worker][1:7564825057743398267:2421] Reader has gone: sender# [1:7564825057743398268:2421]: NKikimr::NReplication::NService::TEvWorker::TEvGone { Status: UNAVAILABLE ErrorDescription: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } 2025-10-24T16:24:48.942889Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7564825062038366570:2421] Handshake: worker# [1:7564825057743398267:2421] 2025-10-24T16:24:48.943172Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7564825062038366570:2421] Create read session: session# [1:7564825062038366571:2295] 2025-10-24T16:24:48.943182Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:157: [Worker][1:7564825057743398267:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-10-24T16:24:48.943184Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:161: [Worker][1:7564825057743398267:2421] Handshake with reader: sender# [1:7564825062038366570:2421] 2025-10-24T16:24:48.943191Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7564825062038366570:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.7%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 2712, MsgBus: 19671 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00144d/r3tmp/tmpOXB7Hn/pdisk_1.dat 2025-10-24T16:23:43.314337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:43.570815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:43.570858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:43.575980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:43.576001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:43.576416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2712, node 1 2025-10-24T16:23:43.735453Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824782025241808:2081] 1761323023286842 != 1761323023286845 2025-10-24T16:23:43.744488Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:43.771151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:43.771162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
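In the Worker::Basic output above, each change record travels as a TEvData payload carrying Codec, Data size, Offset, SeqNo, CreateTime and WriteTime, and is echoed in turn by the topic reader, the worker and the table writer. The snippet below is a throwaway sketch, not part of the test: it pulls the Offset/SeqNo pairs out of such text and checks that they advance monotonically. On a full log each record appears several times (reader, worker, writer), so a real check would de-duplicate the pairs first.

import re

# Record headers as they appear in the Worker::Basic log above, e.g.
#   "Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: ... }]"
# Illustrative parsing only; nothing here is a YDB API.
REC_RE = re.compile(r"Offset: (\d+) SeqNo: (\d+)")

def offsets_monotonic(log_text: str) -> bool:
    """True if every (Offset, SeqNo) pair strictly increases over the text."""
    pairs = [(int(o), int(s)) for o, s in REC_RE.findall(log_text)]
    return all(prev < cur for prev, cur in zip(pairs, pairs[1:]))

sample = """
Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 }]
Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 }]
Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 }]
"""
print(offsets_monotonic(sample))  # True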
2025-10-24T16:23:43.771163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:43.771440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:43.813967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19671 TClient is connected to server localhost:19671 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:23:44.294492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:44.393484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:44.419315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.564321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:44.726329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:23:44.816727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:23:45.549955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790615178067:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.550035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.550154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790615178077:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.550157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.606226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.623835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.640696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.654378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.670058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.696262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.713459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.730045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:45.751938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824790615178940:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.751997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.752334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790615178946:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.752356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824790615178945:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:45.752371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Database ... PORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16770, node 16 2025-10-24T16:24:47.657953Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:47.657966Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:47.657968Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:47.658007Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2816 TClient is connected to server localhost:2816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:47.772544Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:47.776532Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:47.792999Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:47.840939Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
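Most of the repeated noise in the KqpBatchUpdate::SimplePartitions setup above comes from two warnings: the schemeshard note that the operation part proposed ok but "propose itself is undo unsafe" (one per ESchemeOp* suboperation) and the workload-service NOT_FOUND message about the default resource pool. A quick tally of the suboperation types makes the setup easier to skim; the helper below is an assumed, illustrative snippet rather than anything shipped with the tests.

import re
from collections import Counter

# Matches the schemeshard warning repeated throughout the setup above, e.g.
#   "... suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0 ..."
SUBOP_RE = re.compile(r"suboperation type: (ESchemeOp\w+)")

def count_suboperations(log_text: str) -> Counter:
    """Tally how many times each suboperation type was proposed."""
    return Counter(SUBOP_RE.findall(log_text))

sample = (
    "suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0 "
    "suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0 "
    "suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0 "
)
print(count_suboperations(sample))
# Counter({'ESchemeOpCreateTable': 2, 'ESchemeOpAlterSubDomain': 1})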
2025-10-24T16:24:47.876368Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:47.894920Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:47.910766Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:48.050920Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564825063482701734:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.050956Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.051049Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564825063482701744:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.051054Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.064716Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.075066Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.086273Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.099808Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.114120Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.129312Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.145151Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.160237Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:48.181893Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[16:7564825063482702607:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.181917Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564825063482702612:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.181919Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.182038Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7564825063482702614:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.182057Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:48.182791Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:48.188005Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7564825063482702615:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:48.289096Z node 16 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [16:7564825063482702668:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:48.632002Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: BASE_PERF = 1.537966095 Build = 29.01357417 Merge = 41.21001425 less-than assertion failed at ydb/core/viewer/viewer_ut.cpp:121, virtual void NTestSuiteViewer::TTestCaseTabletMerging::Execute_(NUnitTest::TTestContext &): timer.Passed() < 8 * BASE_PERF TBackTrace::Capture()+28 (0x14CC0D0C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+128 (0x14E7BFA0) NTestSuiteViewer::TTestCaseTabletMerging::Execute_(NUnitTest::TTestContext&)+2223 (0x14B27DBF) NTestSuiteViewer::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x14B74427) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0x14E7E178) NTestSuiteViewer::TCurrentTest::Execute()+421 (0x14B73D25) NUnitTest::TTestFactory::Execute()+817 (0x14E7E8C1) NUnitTest::RunMain(int, char**)+3229 (0x14E90D5D) ??+0 (0x7FF8F8A88D90) __libc_start_main+128 (0x7FF8F8A88E40) _start+41 (0x13CB2029) 2025-10-24T16:24:25.853259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:25.882615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:25.882684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:25.882718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:25.948673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:25.948718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:25.952798Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:25.953477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323064905992 != 1761323064905996 2025-10-24T16:24:25.990285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4562, node 1 TClient is connected to server localhost:21880 2025-10-24T16:24:26.069494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.069516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.069521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.069611Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":1},"NodeId":1,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"10","AvailableSize":"90","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[1],"Overall":"Red"}]} 2025-10-24T16:24:27.645992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:27.647654Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:24:27.656927Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:27.657024Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:27.657035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:27.896279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:27.896319Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:27.899806Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:27.905181Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1761323066594350 != 1761323066594354 2025-10-24T16:24:27.940653Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1025, node 2 TClient is connected to server localhost:24940 2025-10-24T16:24:28.166109Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:28.166127Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:28.166131Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:28.166233Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":2},"NodeId":2,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"90","AvailableSize":"10","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[2],"Overall":"Red"}]} 2025-10-24T16:24:30.737681Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:30.744215Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:30.744317Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:30.744335Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:30.852833Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:30.852881Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:30.863867Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:30.864480Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:34:2081] 1761323068783370 != 1761323068783373 2025-10-24T16:24:30.904526Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7286, node 3 TClient is connected to server localhost:24203 2025-10-24T16:24:31.055819Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:31.055844Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:31.055860Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:31.055972Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"0"} 2025-10-24T16:24:32.310363Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:32.325633Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:32.325658Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:32.325711Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:32.440888Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:32.440934Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:32.445558Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:32.445989Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:34:2081] 1761323071545919 != 1761323071545923 2025-10-24T16:24:32.484455Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14759, node 4 TClient is connected to server localhost:29933 2025-10-24T16:24:32.553209Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:32.553233Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:32.553238Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:32.553416Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":4},"NodeId":4,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"10","AvailableSize":"90","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[4],"Overall":"Red"}]} 2025-10-24T16:24:33.846993Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:33.848880Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:24:33.850026Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:33.850120Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:33.850132Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:33.917194Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:33.917236Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:33.922475Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:33.923021Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:34:2081] 1761323073097394 != 1761323073097398 2025-10-24T16:24:33.963078Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63691, node 5 TClient is connected to server localhost:9876 2025-10-24T16:24:34.046077Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:34.046098Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:34.046103Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:34.046208Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"0"} 2025-10-24T16:24:36.610324Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:36.621662Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:36.621748Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:36.621761Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:36.755541Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:36.755584Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:36.759011Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:36.759496Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [6:34:2081] 1761323075004780 != 1761323075004783 2025-10-24T16:24:36.796402Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21246, node 6 TClient is connected to server localhost:14607 2025-10-24T16:24:36.991524Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:36.991545Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:36.991549Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:36.991609Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.8","Used":"80","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":6},"NodeId":6,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"80","AvailableSize":"20","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[6],"Overall":"Red"}]} 2025-10-24T16:24:39.404715Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:39.406490Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:24:39.407553Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:39.407605Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:39.407634Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:39.567611Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:39.567659Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:39.572162Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:39.572460Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:34:2081] 1761323077741241 != 1761323077741245 2025-10-24T16:24:39.612075Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1916, node 7 TClient is connected to server localhost:11359 2025-10-24T16:24:39.694466Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:39.694484Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:39.694488Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:39.694552Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":7},"NodeId":7,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"90","AvailableSize":"10","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[7],"Overall":"Red"}]} 2025-10-24T16:24:40.296643Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:24:40.296949Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7564825029608970743:2279];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:40.296981Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:24:40.374731Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:40.374760Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-10-24T16:24:40.378304Z node 8 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:40.379732Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [8:7564825029608970486:2081] 1761323080274206 != 1761323080274209 2025-10-24T16:24:40.389393Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:40.389693Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23435, node 8 2025-10-24T16:24:40.416121Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:40.416134Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:40.416136Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:40.416195Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18664 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 26047, MsgBus: 7346 2025-10-24T16:24:28.727679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824977117587138:2246];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:28.727722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:28.754321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00144e/r3tmp/tmpgLlK9d/pdisk_1.dat 2025-10-24T16:24:29.208923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:29.208945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:29.213156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:29.307404Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:29.311971Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824977117586929:2081] 1761323068712073 != 1761323068712076 TServer::EnableGrpc on GrpcPort 26047, node 1 2025-10-24T16:24:29.414680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:29.562621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-10-24T16:24:29.562633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:29.562635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:29.562675Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:29.729205Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7346 TClient is connected to server localhost:7346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:30.244738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:30.247582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:30.369690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:30.649048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:30.849036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:30.869461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:31.184679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824990002490475:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.184712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.185667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824990002490484:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.185690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.241702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.252930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.266350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.290689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.312515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.329950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.351786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.368055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:31.404142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824990002491348:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.404174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.404184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824990002491353:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:31.404217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824990002491355:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10- ... y maybe) 2025-10-24T16:24:49.048540Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:49.048592Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:49.055556Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11594 TClient is connected to server localhost:11594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:49.225138Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:49.232386Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:24:49.261814Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.288870Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:49.339115Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:49.353130Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:49.462164Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825068887076697:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.462187Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.462359Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825068887076707:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.462365Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.474014Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.484313Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.500330Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.529984Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.566232Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.587016Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.608976Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.638837Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:49.720117Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[13:7564825068887077580:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.720144Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.720279Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825068887077585:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.720287Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825068887077586:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.720304Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:49.721360Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:49.726023Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:24:49.726080Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7564825068887077589:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:49.796956Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7564825068887077641:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:49.960088Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:50.479104Z node 13 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323090352, txId: 281474976715673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000cf3/r3tmp/tmpuFdAN0/pdisk_1.dat 2025-10-24T16:23:55.325553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:55.325858Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.356198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:55.356228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:55.359749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31418, node 1 2025-10-24T16:23:55.657251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:55.657265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:55.657267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:55.657309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:55.684994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.687276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-10-24T16:23:55.687780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:55.697655Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7564824835116559577:2295] 2025-10-24T16:23:55.699498Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:23:55.729521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:23:55.729549Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:23:55.729699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:23:55.729705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:23:55.729709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:23:55.729758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:23:55.729764Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:23:55.729771Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7564824835116559594:2295] in generation 1 2025-10-24T16:23:55.730229Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:23:55.741828Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:23:55.741868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:23:55.741887Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7564824835116559596:2296] 2025-10-24T16:23:55.741890Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:55.741892Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:23:55.742379Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.742420Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:23:55.742438Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:23:55.742444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7564824835116559579:2310], serverId# [1:7564824835116559584:2313], sessionId# [0:0:0] 2025-10-24T16:23:55.742462Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:55.742467Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:55.742473Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:23:55.742477Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2025-10-24T16:23:55.742482Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:55.742535Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:23:55.742548Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:23:55.743200Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:55.744613Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:55.744630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:23:55.753476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7564824835116559610:2327], serverId# [1:7564824835116559612:2329], sessionId# [0:0:0] 2025-10-24T16:23:55.753487Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:55.755777Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1761323035801 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1761323035801 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:55.755783Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.755805Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:55.755811Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:55.755817Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1761323035801:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:55.756209Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1761323035801:281474976715657 keys extracted: 0 2025-10-24T16:23:55.756236Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:55.756250Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:55.756259Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:23:55.759508Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:23:55.761133Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-10-24T16:23:55.761593Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1761323035800 2025-10-24T16:23:55.761596Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.762530Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1761323035801} 2025-10-24T16:23:55.762540Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:55.762545Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:55.762548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:55.762552Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:55.762563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1761323035801 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7564824835116559283:2149], exec latency: 4 ms, propose latency: 6 ms 2025-10-24T16:23:55.762572Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:23:55.762577Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.762588Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1761323035808 2025-10-24T16:23:55.763951Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7564824835116559596:2296][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-10-24T16:23:55.767909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:23:55.767919Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:23:55.778691Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPath ... 
2075186224037891 2025-10-24T16:24:50.605702Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-10-24T16:24:50.605712Z node 25 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:24:50.605718Z node 25 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976710759] at 72075186224037891 for AlterTable 2025-10-24T16:24:50.605774Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2025-10-24T16:24:50.605778Z node 25 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:24:50.605782Z node 25 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [3500:281474976710759] at 72075186224037891 2025-10-24T16:24:50.605787Z node 25 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037891 version 2 2025-10-24T16:24:50.605936Z node 25 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:50.606525Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-10-24T16:24:50.606547Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976710759] from 72075186224037891 at tablet 72075186224037891 send result to client [25:401:2400], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:50.606558Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037891 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2025-10-24T16:24:50.606570Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-10-24T16:24:50.606948Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037891 state Ready 2025-10-24T16:24:50.606960Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-10-24T16:24:50.631807Z node 25 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-10-24T16:24:50.631839Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:50.631844Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:50.631882Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976710759] from 72075186224037888 at tablet 72075186224037888 send result to client [25:401:2400], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:50.631898Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2025-10-24T16:24:50.631913Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:50.632346Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037888 state Ready 
2025-10-24T16:24:50.632356Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:24:50.688128Z node 25 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.688156Z node 25 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.688161Z node 25 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.740122Z node 25 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.740151Z node 25 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.740156Z node 25 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.772127Z node 25 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.772156Z node 25 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.772162Z node 25 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.803556Z node 25 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.803585Z node 25 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.803589Z node 25 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.837804Z node 25 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.837835Z node 25 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.837839Z node 25 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.847249Z node 25 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgk0wp178bgys7be3scznr, Database: , SessionId: ydb://session/3?node_id=25&id=NmIyNTYzNGMtMjVkYzUzZGUtZWEzNzRmZWItY2RlZDJlZjE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:24:50.847464Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:8] at 72075186224037888 2025-10-24T16:24:50.847486Z node 25 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2025-10-24T16:24:50.847497Z node 25 :TX_DATASHARD INFO: datashard_write_operation.cpp:800: Write transaction 8 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-10-24T16:24:50.847537Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 8 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-10-24T16:24:50.847570Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 8 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-10-24T16:24:50.847583Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:50.847629Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:911: SelfId: [25:1225:2890], Table: `/Root/Table` ([72057594046644480:2:3]), SessionActorId: [25:1168:2890]Got LOCKS BROKEN for table `/Root/Table`. ShardID=72075186224037888, Sink=[25:1225:2890].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-10-24T16:24:50.847645Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [25:1218:2890], SessionActorId: [25:1168:2890], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[25:1168:2890]. 2025-10-24T16:24:50.847690Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=25&id=NmIyNTYzNGMtMjVkYzUzZGUtZWEzNzRmZWItY2RlZDJlZjE=, ActorId: [25:1168:2890], ActorState: ExecuteState, TraceId: 01k8bgk0wp178bgys7be3scznr, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [25:1308:2890] from: [25:1218:2890] 2025-10-24T16:24:50.847734Z node 25 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [25:1308:2890] TxId: 281474976715665. Ctx: { TraceId: 01k8bgk0wp178bgys7be3scznr, Database: , SessionId: ydb://session/3?node_id=25&id=NmIyNTYzNGMtMjVkYzUzZGUtZWEzNzRmZWItY2RlZDJlZjE=, PoolId: default, DatabaseId: /Root}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-10-24T16:24:50.847777Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:9] at 72075186224037888 2025-10-24T16:24:50.847785Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:461: Skip empty write operation for [0:9] at 72075186224037888 2025-10-24T16:24:50.847809Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:50.847828Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=25&id=NmIyNTYzNGMtMjVkYzUzZGUtZWEzNzRmZWItY2RlZDJlZjE=, ActorId: [25:1168:2890], ActorState: ExecuteState, TraceId: 01k8bgk0wp178bgys7be3scznr, Create QueryResponse for error on request, msg: >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-10-24T16:24:50.848370Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:24:50.848384Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:24:50.848503Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:883: [72075186224037889][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-10-24T16:24:50.848524Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1097: [72075186224037889][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 139 count 1 last offset 0, current partition end offset: 1 2025-10-24T16:24:50.848531Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1123: [72075186224037889][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2025-10-24T16:24:50.848543Z node 25 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 139 accessed 0 times before, last time 1970-01-01T00:00:03.000000Z 2025-10-24T16:24:50.848550Z node 25 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2025-10-24T16:24:50.848566Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:581: FormAnswer for 1 blobs 2025-10-24T16:24:50.848578Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 0 totakecount 1 count 1 size 121 from pos 0 cbcount 1 2025-10-24T16:24:50.848663Z node 25 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:24:50.848683Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |55.7%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |55.7%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-10-24T16:23:56.484340Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1761323036484331 2025-10-24T16:23:58.135467Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:58.159238Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824847078816567:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:58.159261Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00155d/r3tmp/tmpjDLzrf/pdisk_1.dat 2025-10-24T16:23:58.204081Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:58.668329Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:58.668155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:58.668191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:58.701664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:58.701687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:58.703079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:58.703094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:58.751587Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:58.755047Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:58.755192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:58.759027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13556, node 1 2025-10-24T16:23:58.826230Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:58.844302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00155d/r3tmp/yandexScjCEf.tmp 2025-10-24T16:23:58.844321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00155d/r3tmp/yandexScjCEf.tmp 2025-10-24T16:23:58.844395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00155d/r3tmp/yandexScjCEf.tmp 2025-10-24T16:23:58.844423Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:58.849737Z INFO: TTestServer started on Port 27349 GrpcPort 13556 TClient is connected to server localhost:27349 PQClient connected to localhost:13556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:58.895140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:23:58.941765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:23:58.968260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... waiting... 
2025-10-24T16:23:59.028449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-10-24T16:23:59.106358Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:59.194240Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:00.095769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:00.094022Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824855668751374:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.094060Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824855668751406:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.094070Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.094437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824855668751409:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.094449Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.102417Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824855668751408:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:24:00.178472Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824855668751440:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:00.218263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:00.219339Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824856341704353:2331], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:00.218420Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824855668751447:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:00.219129Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=Mjc3MjhiZWYtMTA5ODhiM2YtYTQ3NjhiNzktZWU0ODM5Mjg=, ActorId: [2:7564824855668751368:2295], ActorState: ExecuteState, TraceId: 01k8bghfax6vvmsd1ttdgprxva, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:00.219652Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:00.220049Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=ZDg3N2Q0NjUtODFjM2JkMjYtZWU2YTMxNDMtMjc2ZWUzNw==, ActorId: [1:7564824856341704304:2324], ActorState: ExecuteState, TraceId: 01k8bghfcwe5m5h0pz2a0jr5gt, ReplyQueryCompileError, status SCHEME_ERROR remove ... adCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:50.854690Z :INFO: [/Root] [/Root] [c60519b4-9a8df6cb-29bc1996-86d57acb] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:50.854694Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:24:50.854696Z :INFO: [/Root] [/Root] [c60519b4-9a8df6cb-29bc1996-86d57acb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 813 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:50.854701Z :INFO: [/Root] [/Root] [c60519b4-9a8df6cb-29bc1996-86d57acb] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:50.854703Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:24:50.854706Z :INFO: [/Root] [/Root] [c60519b4-9a8df6cb-29bc1996-86d57acb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 813 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:50.854722Z :NOTICE: [/Root] [/Root] [c60519b4-9a8df6cb-29bc1996-86d57acb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:50.854750Z :INFO: [/Root] [/Root] [8dde4d82-d703d77d-fd4ba277-8bd49d38] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:50.854752Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-10-24T16:24:50.854755Z :INFO: [/Root] [/Root] [8dde4d82-d703d77d-fd4ba277-8bd49d38] Counters: { Errors: 0 CurrentSessionLifetimeMs: 814 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:50.854759Z :NOTICE: [/Root] [/Root] [8dde4d82-d703d77d-fd4ba277-8bd49d38] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:50.854772Z :INFO: [/Root] [/Root] [aeb9886b-63cd698-3427f17b-445965ad] Closing read session. Close timeout: 0.000000s 2025-10-24T16:24:50.854774Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:24:50.854777Z :INFO: [/Root] [/Root] [aeb9886b-63cd698-3427f17b-445965ad] Counters: { Errors: 0 CurrentSessionLifetimeMs: 825 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:24:50.854780Z :NOTICE: [/Root] [/Root] [aeb9886b-63cd698-3427f17b-445965ad] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:24:50.855899Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_6908905956187492829_v1 grpc read done: success# 0, data# { } 2025-10-24T16:24:50.855911Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_6908905956187492829_v1 grpc read failed 2025-10-24T16:24:50.855918Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer shared/user session shared/user_3_2_6908905956187492829_v1 grpc closed 2025-10-24T16:24:50.855925Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_6908905956187492829_v1 is DEAD 2025-10-24T16:24:50.855951Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_10424825848804205986_v1 grpc read done: success# 0, data# { } 2025-10-24T16:24:50.855953Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_10424825848804205986_v1 grpc read failed 2025-10-24T16:24:50.855958Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_10424825848804205986_v1 grpc closed 2025-10-24T16:24:50.855964Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_10424825848804205986_v1 is DEAD 2025-10-24T16:24:50.856094Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_4845516052970369408_v1 grpc read done: success# 0, data# { } 2025-10-24T16:24:50.856095Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_4845516052970369408_v1 grpc read failed 2025-10-24T16:24:50.856098Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_4845516052970369408_v1 grpc closed 2025-10-24T16:24:50.856104Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_4845516052970369408_v1 is DEAD 2025-10-24T16:24:50.856119Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|f4373b21-fb1bcfc9-8995b143-5df7fe9c_0 grpc read done: success: 0 data: 2025-10-24T16:24:50.856121Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|f4373b21-fb1bcfc9-8995b143-5df7fe9c_0 grpc read failed 2025-10-24T16:24:50.856128Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|f4373b21-fb1bcfc9-8995b143-5df7fe9c_0 grpc closed 2025-10-24T16:24:50.856131Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|f4373b21-fb1bcfc9-8995b143-5df7fe9c_0 is DEAD 2025-10-24T16:24:50.856254Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:24:50.856673Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782888:2474] disconnected. 
2025-10-24T16:24:50.856684Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782888:2474] disconnected; active server actors: 1 2025-10-24T16:24:50.856688Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782888:2474] client user disconnected session shared/user_3_2_6908905956187492829_v1 2025-10-24T16:24:50.856700Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1186: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-10-24T16:24:50.856708Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782884:2473] disconnected. 2025-10-24T16:24:50.856710Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782884:2473] disconnected; active server actors: 1 2025-10-24T16:24:50.856714Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782884:2473] client user disconnected session shared/user_3_1_10424825848804205986_v1 2025-10-24T16:24:50.856726Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782887:2475] disconnected. 2025-10-24T16:24:50.856728Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782887:2475] disconnected; active server actors: 1 2025-10-24T16:24:50.856730Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825071127782887:2475] client user disconnected session shared/user_3_3_4845516052970369408_v1 2025-10-24T16:24:50.859108Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_10424825848804205986_v1 2025-10-24T16:24:50.859153Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [3:7564825071127782893:2483] destroyed 2025-10-24T16:24:50.859163Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [3:7564825071127782930:2472] destroyed 2025-10-24T16:24:50.859177Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:24:50.859190Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.859194Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.859197Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.859239Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_10424825848804205986_v1 2025-10-24T16:24:50.896195Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.896208Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.896211Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.997521Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.997536Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.997539Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.101493Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.101507Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.101510Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.209015Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1006: ActorId: [3:7564825075422750287:2488] TxId: 281474976720678. Ctx: { TraceId: 01k8bgk13eat4bhmq5cex9kykr, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NzYwNGE3ZmYtYTRiODUyOTEtN2UzZjhmZGItYTJiNzM4NmU=, PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-10-24T16:24:51.209079Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [3:7564825075422750296:2488], TxId: 281474976720678, task: 3. Ctx: { CheckpointId : . TraceId : 01k8bgk13eat4bhmq5cex9kykr. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NzYwNGE3ZmYtYTRiODUyOTEtN2UzZjhmZGItYTJiNzM4NmU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7564825075422750287:2488], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[optimizers-yql-20264-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter |55.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits >> test.py::test[action-mixed_eval_typeof_world1--Results] [GOOD] >> test.py::test[aggr_factory-every-default.txt-Results] >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-10-24T16:23:58.689620Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1761323038689612 2025-10-24T16:24:00.149663Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824854607045509:2095];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.149689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00153f/r3tmp/tmpSGwP2L/pdisk_1.dat 2025-10-24T16:24:00.284856Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:00.309768Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:00.455425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:00.455468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:00.460179Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:00.464694Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:24:00.464989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21190, node 1 2025-10-24T16:24:00.543684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00153f/r3tmp/yandexSaU9vx.tmp 2025-10-24T16:24:00.543702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00153f/r3tmp/yandexSaU9vx.tmp 2025-10-24T16:24:00.543927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/fxx3/00153f/r3tmp/yandexSaU9vx.tmp 2025-10-24T16:24:00.543979Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:00.544156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:00.544177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:00.546644Z INFO: TTestServer started on Port 4276 GrpcPort 21190 2025-10-24T16:24:00.551149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:00.552874Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:00.552549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4276 PQClient connected to localhost:21190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:00.899350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:00.906494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-10-24T16:24:00.962144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824854607046399:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.962170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.962271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824854607046412:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.962278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824854607046411:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.962282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:00.963446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:01.020019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824854607046415:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-10-24T16:24:01.112417Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824858902013797:2662] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:01.268050Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:01.355087Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:01.366361Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824858065224986:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:01.367194Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=YzBjNWM4NC00MzhiNzZmNS1iZGMxODQyYi1jNjQ0Yzc0Ng==, ActorId: [2:7564824858065224953:2295], ActorState: ExecuteState, TraceId: 01k8bghg734z8pdt04yfammnx8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:01.368395Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:01.368319Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824858902013807:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:01.368512Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NDVmNDE2YzktYjY0ZTg4MzctYzYxZDZiMGQtODZlMTRjMTU=, ActorId: [1:7564824854607046397:2322], ActorState: ExecuteState, TraceId: 01k8bghg6146sq0zhgv2449w4n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:01.368574Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:01.373530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:01.493349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 7 ... 
ter ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.137576Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.239039Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.239053Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.239056Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.342445Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.342459Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.342462Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.443167Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.443182Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.443185Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.543927Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.543944Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.543946Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.647967Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.647986Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.647989Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.750390Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.750408Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.750411Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.851688Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.851721Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.851725Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:50.952062Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:50.952082Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:50.952084Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.013531Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:157: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-10-24T16:24:51.013559Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-10-24T16:24:51.016102Z node 4 :PERSQUEUE DEBUG: partition.cpp:1016: [72075186224037892][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-10-24T16:24:51.018709Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:563: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-10-24T16:24:51.018888Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1849: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 2025-10-24T16:24:51.019556Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:404: [72075186224037893][rt3.dc1--test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 3, Generation 2 2025-10-24T16:24:51.056244Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.056263Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.056266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.092110Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:24:51.156847Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.156866Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.156869Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.259988Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.260004Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.260008Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.364108Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 
2025-10-24T16:24:51.364122Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.364125Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.467705Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.467721Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.467724Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.570381Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.570402Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.570405Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.675072Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.675089Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.675093Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:24:51.776119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.776132Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.776135Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist === Waiting for repair >>> Ready to answer: ok 2025-10-24T16:24:51.879914Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.879935Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.879938Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist === Closing the session 2025-10-24T16:24:51.892219Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:31476" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:31476" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:31476" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } 2025-10-24T16:24:51.902528Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-10-24T16:24:51.902795Z :INFO: [/Root] TraceId [] SessionId 
[] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-10-24T16:24:51.904164Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-10-24T16:24:51.904174Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-10-24T16:24:51.904189Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-10-24T16:24:51.904329Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-10-24T16:24:51.904344Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session is aborting and will not restart 2025-10-24T16:24:51.904366Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-10-24T16:24:51.987944Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:24:51.987969Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:24:51.987973Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist |55.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |55.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |55.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |55.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> KqpOlapJson::EmptyVariants[2,true,1,1000,100,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,100,0.5] >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 27022, MsgBus: 14918 2025-10-24T16:24:26.228886Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824966223789672:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:26.228950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00148b/r3tmp/tmpU18JNy/pdisk_1.dat 2025-10-24T16:24:26.500572Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:26.502262Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824966223789430:2081] 1761323066213819 != 1761323066213822 2025-10-24T16:24:26.503309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.503332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.503981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-10-24T16:24:26.504270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27022, node 1 2025-10-24T16:24:26.523570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.523584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.523586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.523628Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14918 2025-10-24T16:24:26.695395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:26.902740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:26.928593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:24:26.948415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:27.203764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:24:27.224173Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:27.731254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:27.915731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.192493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824979108692989:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.192530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.192725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824979108692999:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.192733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.764580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.787256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.818045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.831552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.844155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.859375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.887289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:29.971640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.081470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824983403661168:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.081506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.081557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824983403661173:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.081565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824983403661174:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have acces ... :24:51.452791Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65508 2025-10-24T16:24:51.635122Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:65508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:51.666698Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:24:51.674100Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:51.682212Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:51.695375Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:397} StateFunc too long Type# 268639258 Duration# 0.009195s 2025-10-24T16:24:51.714508Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:24:51.762823Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.783715Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:51.891393Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825077376566738:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:51.891419Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:51.891490Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825077376566747:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:51.891501Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:51.904961Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.913239Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.920969Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.935640Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.950239Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.967584Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:51.985496Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:52.001079Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:52.032646Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[12:7564825081671534907:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:52.032673Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:52.032755Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825081671534913:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:52.032759Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:52.032764Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825081671534912:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:52.033717Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:52.038657Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564825081671534916:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:24:52.096897Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564825081671534968:3558] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:52.380022Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:52.553298Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323092592, txId: 281474976710673] shutting down 2025-10-24T16:24:52.609567Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323092641, txId: 281474976710675] shutting down |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> test_auditlog.py::test_single_dml_query_logged[upsert] >> test_canonical_records.py::test_replace_config |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME >> TExportToS3Tests::CancelledExportEndTime [GOOD] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::ShouldBreakLocksOnConcurrentAddIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream >> KqpOlapJson::EmptyVariants[2,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,1000000,0.5] >> test.py::test[select-dict_with_few_keys-default.txt-Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:39.551864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:39.551897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:39.551903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:39.551911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:39.551918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:39.551923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:39.551935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:39.551949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:39.552086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:39.552155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:39.570100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:39.570124Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:39.574044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:39.574078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:39.574116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:39.575643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:39.575827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:39.575963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:39.576051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:39.577114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:39.577156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:39.577406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:39.577418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:39.577446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:39.577455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:39.577462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:39.577489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:39.578992Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:39.606187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:39.606266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:39.606320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:39.606329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:39.606407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-10-24T16:24:39.606425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:39.607073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:39.607117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:39.607184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:39.607196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:39.607203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:39.607209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:39.607680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:39.607696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:39.607703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:39.608128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:39.608142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:39.608149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:39.608158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:39.608919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:39.609412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:39.609456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 
1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:39.609689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:39.609721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:39.609730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:39.609802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:39.609812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:39.609840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:39.609853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:39.610337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:39.610348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
00002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: true EnablePermissions: true } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-10-24T16:24:56.563727Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_backup_restore_common.h:586: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:56.563754Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:24:56.563760Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976710759:0 type: TxBackup target path: [OwnerId: 72057594046678944, LocalPathId: 4] source path: 2025-10-24T16:24:56.563830Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:56.563840Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:563) 2025-10-24T16:24:56.564440Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:56.564487Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-10-24T16:24:56.564526Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-10-24T16:24:56.564533Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-10-24T16:24:56.564569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:56.564578Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 
281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-10-24T16:24:56.564584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-10-24T16:24:56.564588Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710759:0 2 -> 3 2025-10-24T16:24:56.564936Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:56.564944Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:56.564965Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_backup.cpp:41: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-10-24T16:24:56.565270Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-10-24T16:24:56.565297Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-10-24T16:24:56.565775Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-10-24T16:24:56.565790Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-10-24T16:24:56.566046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566069Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566074Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:58: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566091Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_backup.cpp:41: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-10-24T16:24:56.566385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 TestWaitNotification wait txId: 102 2025-10-24T16:24:56.566590Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:24:56.566597Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:24:56.566626Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710759, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-10-24T16:24:56.566636Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710759, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566649Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7218: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566662Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7220: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-10-24T16:24:56.566782Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:62: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:24:56.566787Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:70: NotifyTxCompletion, export is ready to notify, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:24:56.570707Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:24:56.570726Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:583:2539] TestWaitNotification: OK eventTxId 102 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords |55.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |55.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |55.9%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[insert_monotonic-not_all_fail--Results] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement |55.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |55.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |55.9%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
Cdc::ShouldBreakLocksOnConcurrentAddStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always >> KqpOlapJson::EmptyVariants[2,true,1,1000,1000000,0.5] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test.py::test[insert_monotonic-not_all_fail--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--Results] |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::EmptyVariants[2,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 63053, MsgBus: 32202 2025-10-24T16:24:40.769047Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825030457447238:2147];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:40.769131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001269/r3tmp/tmpueikS6/pdisk_1.dat 2025-10-24T16:24:40.829014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:40.847297Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:40.847780Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825030457447119:2081] 1761323080764698 != 1761323080764701 TServer::EnableGrpc on GrpcPort 63053, node 1 2025-10-24T16:24:40.863798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:40.863817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:40.863819Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:40.863890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:40.880674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:40.880714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:40.888297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32202 TClient is connected to server localhost:32202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:40.984940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:24:41.038508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:41.552607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825034752415095:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:41.552645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:41.552851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825034752415105:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:41.552862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:41.671075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:24:41.731949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:24:41.732336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:24:41.732440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:24:41.732690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:24:41.732961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:24:41.732986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:24:41.733011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:24:41.733037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:24:41.733068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:24:41.733278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:24:41.733298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:24:41.733316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:24:41.733333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825034752415169:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:24:41.740932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:24:41.740954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:24:41.740969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:24:41.740975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:24:41.741095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:24:41.741101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:24:41.741112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:24:41.741117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:24:41.741126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:24:41.741134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:24:41.741141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... iled to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.045149Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.045309Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825107833227229:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.045320Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825107833227230:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.045326Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.046226Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:58.049451Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-10-24T16:24:58.049528Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825107833227233:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:24:58.120830Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825107833227284:2496] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:58.165621Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:24:58.165707Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (11u), (12u), (13u), (14u) 2025-10-24T16:24:58.238690Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:24:58.238774Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:24:58.946858Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:24:58.946878Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:24:58.946887Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:24:58.947115Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323098010;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:24:58.947138Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323098045;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:24:58.947144Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323098059;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:24:58.947269Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825103538259705:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323098080;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761323098080 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:24:58.960455Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:24:58.960474Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:24:58.960481Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:24:58.960730Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323098010;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:24:58.960756Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323098045;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:24:58.960761Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323098059;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:24:58.960878Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825103538259704:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323098080;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761323098080 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] OUTPUT: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[[""]];[[""]];[[""]];[[""]]] INDEX:0/0/0 HEADER:0/0/0 |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[select-optional_as_warn-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] Test command err: Trying to start YDB, gRPC: 9671, MsgBus: 1607 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014c4/r3tmp/tmpF04MY3/pdisk_1.dat 2025-10-24T16:24:26.396443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:26.411241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:26.411273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:26.416207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-10-24T16:24:26.492589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:26.500242Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824968720637057:2081] 1761323066219639 != 1761323066219642 2025-10-24T16:24:26.502177Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9671, node 1 2025-10-24T16:24:26.705930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:26.812232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:26.812243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:26.812245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:26.812294Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:27.236163Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1607 TClient is connected to server localhost:1607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:27.866023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:24:27.903197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:24:27.918349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:28.247684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:28.778416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.005526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:24:29.848779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824981605540628:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.848847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.853761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824981605540638:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:29.853793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.147120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.176257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.214924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.249492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.305077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.389160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.464143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.547950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:30.710461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564824985900508799:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.710490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.710625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824985900508805:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.710632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824985900508804:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:30.710638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: ... OUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.896025Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825103883939502:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.896072Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7564825103883939503:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.896184Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:58.898299Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:58.903010Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7564825103883939506:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:24:58.994908Z node 12 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [12:7564825103883939558:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:59.168964Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; Trying to start YDB, gRPC: 18676, MsgBus: 65190 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0014c4/r3tmp/tmpba5C5T/pdisk_1.dat 2025-10-24T16:24:59.831910Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:59.832036Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.848195Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:59.855935Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [13:7564825110967339147:2081] 1761323099817448 != 1761323099817451 2025-10-24T16:24:59.856154Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:59.856178Z node 13 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:59.860585Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18676, node 13 2025-10-24T16:24:59.872548Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:24:59.872565Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:24:59.872567Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:24:59.872621Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65190 TClient is connected to server localhost:65190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:25:00.031787Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:25:00.036292Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:25:00.045177Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:25:00.065044Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:00.417634Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825115262307544:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:00.417676Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:00.417855Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825115262307578:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:00.417861Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7564825115262307580:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:00.417870Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:00.418778Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:25:00.422059Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-10-24T16:25:00.422123Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7564825115262307583:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:25:00.493336Z node 13 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [13:7564825115262307634:2649] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2128;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2128;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2128;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=560;columns=4; |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc >> Cdc::ShouldBreakLocksOnConcurrentAlterStream [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> test.py::test[optimizers-yql-2582_limit_for_join_input_other--Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.0%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |56.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |56.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |56.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} |56.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test.py::test[aggr_factory-every-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-median-default.txt-Results] |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] |56.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |56.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |56.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |56.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |56.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TExportToS3Tests::CompletedExportEndTime [GOOD] |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2025-10-24T16:23:45.486932Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.487108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.487302Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.487488Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 
2025-10-24T16:23:45.487619Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.487784Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.488011Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.488113Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.489191Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:45.542270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.542585Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.542615Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.543772Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.543791Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:45.544772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3046:2428], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.545141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.545217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.545908Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3068:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.546163Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.546374Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.546517Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3060:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.546922Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.546962Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3064:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.547110Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547159Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1348:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.547296Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3043:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.547323Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3051:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.547345Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547515Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547614Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547663Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547716Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547805Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547817Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547825Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547972Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.547999Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:3072:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.548010Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:45.548066Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3056:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:45.548149Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:45.548188Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:45.672504Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:45.734486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.734540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.734966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.734980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.735027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.735038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.735093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.735102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.735157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.735172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.735228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.735237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.735287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:45.735299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:45.735355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE ... TCleanupTablesActor] ActorId: [33:3046:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:59.811066Z node 35 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [35:1580:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:59.811155Z node 30 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811194Z node 32 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [32:3042:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:59.811372Z node 33 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811389Z node 35 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811509Z node 34 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [34:1335:2188], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:59.811528Z node 29 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811538Z node 29 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811554Z node 30 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811601Z node 35 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811743Z node 32 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811761Z node 33 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811920Z node 31 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [31:3038:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:59.811956Z node 32 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.811972Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.812182Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.812269Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:24:59.812281Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.812405Z node 36 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [36:1584:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:24:59.812577Z node 36 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:24:59.812642Z node 36 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:25:00.030501Z node 28 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:00.133984Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.134043Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135252Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135280Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135348Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135363Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135424Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135434Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135493Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135506Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135555Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135567Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135621Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135632Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135684Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135693Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.135739Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.135749Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.210915Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 29 Cookie 29 2025-10-24T16:25:00.211151Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 30 Cookie 30 2025-10-24T16:25:00.211224Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 31 Cookie 31 2025-10-24T16:25:00.211257Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 32 Cookie 32 2025-10-24T16:25:00.211296Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 33 Cookie 33 2025-10-24T16:25:00.211336Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 34 Cookie 34 2025-10-24T16:25:00.211369Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 35 Cookie 35 2025-10-24T16:25:00.211407Z node 28 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2025-10-24T16:25:00.211530Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.211748Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.211795Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.211874Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.211919Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.211965Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.212019Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.212083Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.212123Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.278982Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:25:00.324169Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:25:00.504012Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20166, node 28 TClient is connected to server localhost:30915 2025-10-24T16:25:00.593760Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:25:00.593788Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:25:00.593793Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:25:00.593968Z node 28 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
>> TExportToS3Tests::Checksums ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] Test command err: 2025-10-24T16:23:10.793567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:11.098265Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:11.098374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:11.098632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:23:11.568751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:11.569019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:11.580227Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:11.584943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761322988413325 != 1761322988413329 2025-10-24T16:23:11.620870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26956, node 1 TClient is connected to server localhost:21394 2025-10-24T16:23:11.812567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:11.812586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:11.812590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:11.815797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-10-24T16:23:40.236125Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.236251Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.236352Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.236549Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.236716Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.236875Z node 7 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.237117Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.237596Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.238553Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:40.257137Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:40.257573Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:40.258042Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:40.258374Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:40.258385Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:40.258393Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:23:40.258976Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3073:2428], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.259274Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.259312Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.259994Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1578:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.260259Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.260440Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.260678Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3078:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.260926Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261178Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3070:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.261197Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261228Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3083:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.261255Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3087:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.261533Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261562Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261579Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261721Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261739Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261765Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3031:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.261865Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.261979Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1333:2186], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.262017Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:40.262110Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.262203Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:23:40.262240Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:1582:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:40.262349Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... apshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.368569Z node 44 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [44:1966:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.368608Z node 45 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [45:1970:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.369073Z node 39 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [39:3059:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.369112Z node 40 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [40:3067:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.369163Z node 44 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369184Z node 45 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369356Z node 41 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [41:3072:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.369384Z node 42 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [42:3076:2370], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.369407Z node 44 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369416Z node 45 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369571Z node 39 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369597Z node 40 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369684Z node 46 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [46:1974:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:00.369828Z node 39 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369845Z node 40 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369854Z node 41 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.369872Z node 42 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.370011Z node 41 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:25:00.370022Z node 42 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:25:00.370040Z node 46 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:00.370102Z node 46 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-10-24T16:25:00.573160Z node 38 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:00.659226Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.659277Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660554Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660584Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660649Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660660Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660707Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660720Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660781Z node 38 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660791Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660833Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660845Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660893Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660903Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.660956Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.660969Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.661018Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:00.661028Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:00.731069Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 39 Cookie 39 2025-10-24T16:25:00.731287Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 40 Cookie 40 2025-10-24T16:25:00.731372Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 41 Cookie 41 2025-10-24T16:25:00.731406Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 42 Cookie 42 2025-10-24T16:25:00.731442Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 43 Cookie 43 2025-10-24T16:25:00.731494Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 44 Cookie 44 2025-10-24T16:25:00.731529Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 45 Cookie 45 2025-10-24T16:25:00.731565Z node 38 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 46 Cookie 46 2025-10-24T16:25:00.731668Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.731890Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.731950Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(44, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.731987Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.732025Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.732081Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-10-24T16:25:00.732120Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.732226Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(46, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.732272Z node 38 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(45, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:00.854931Z node 38 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-10-24T16:25:00.888811Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-10-24T16:25:01.004285Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 24464, node 38 TClient is connected to server localhost:21461 2025-10-24T16:25:01.087384Z node 38 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:25:01.087407Z node 38 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:25:01.087412Z node 38 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:25:01.087626Z node 38 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |56.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps |56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> test_canonical_records.py::test_replace_config [GOOD] |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[select-optional_as_warn-default.txt-Results] [GOOD] >> test.py::test[select-refselect-1000-Results] [SKIPPED] >> test.py::test[select-scalar_subquery-default.txt-Results] |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> TExportToS3Tests::Changefeeds |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.2%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> TExportToS3Tests::Changefeeds [GOOD] >> test_secrets_usage.py::test_create_eds_with_many_secrets_with_success ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003393/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_create_and_remove_tenant/audit_log.ngdv89po.txt 2025-10-24T16:24:55.968521Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2025-10-24T16:24:55.970900Z: {"paths":"[/Root/users/database]","tx_id":"281474976715660","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DATABASE","component":"schemeshard"} 2025-10-24T16:24:56.015979Z: {"paths":"[/Root/users/database]","tx_id":"281474976715661","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"ALTER DATABASE","component":"schemeshard"} 2025-10-24T16:24:57.977818Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2025-10-24T16:24:58.156064Z: {"paths":"[.metadata/workload_manager/pools/default]","tx_id":"281474976720657","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2025-10-24T16:24:58.286708Z: {"reason":"Check failed: path: '/Root/users/database/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92","paths":"[default]","tx_id":"281474976720658","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAlreadyExists","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2025-10-24T16:25:00.233666Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} 2025-10-24T16:25:00.240275Z: {"paths":"[/Root/users/database]","tx_id":"281474976715662","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DATABASE","component":"schemeshard"} 2025-10-24T16:25:00.292399Z: 
{"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |56.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] |56.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |56.2%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |56.3%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:36.625724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:36.626029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:36.626035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:36.626043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:36.626050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:36.626054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:36.626063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:36.626076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:36.627097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:36.628017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:36.803173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:36.803198Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:36.812359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:36.812391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:36.812424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:36.813808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:36.813971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:36.814092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:36.814170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:36.815149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:36.815193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:36.815452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:36.815461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:36.815487Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:36.815494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:36.815500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:36.815523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.816889Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:36.840654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:36.840734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.840787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:36.840796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:36.840860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:36.840879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:36.841481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:36.841520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:36.841574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.841583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:36.841588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-10-24T16:24:36.841593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:36.841970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.841981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:36.841987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:36.842302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.842315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:36.842322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:36.842328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:36.842966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:36.843361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:36.843418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:36.843639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:36.843669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:36.843677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:36.843742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:36.843749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:36.843776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:36.843789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:36.844209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:36.844217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 4046678944, LocalPathId: 1], version: 12 2025-10-24T16:25:07.413652Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:25:07.413725Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:07.413735Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:07.413739Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-10-24T16:25:07.413743Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-10-24T16:25:07.413746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-10-24T16:25:07.413755Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-10-24T16:25:07.413761Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:139:2161] 2025-10-24T16:25:07.414346Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:07.414429Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:07.414446Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-10-24T16:25:07.414457Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:25:07.414464Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:07.414469Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: 
txId# 281474976710761 2025-10-24T16:25:07.414475Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-10-24T16:25:07.414845Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:07.414869Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:25:07.414877Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [5:1251:3034] TestWaitNotification: OK eventTxId 105 2025-10-24T16:25:07.415104Z node 5 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-10-24T16:25:07.415524Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:07.415535Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:736: TExport::TTxProgress: Resume: id# 105 2025-10-24T16:25:07.415560Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:534: TExport::TTxProgress: Allocate txId: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-10-24T16:25:07.415569Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:07.415585Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 105, at schemeshard: 72057594046678944 2025-10-24T16:25:07.415588Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:07.415592Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:856: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 105 2025-10-24T16:25:07.415596Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:526: TExport::TTxProgress: Drop propose: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-10-24T16:25:07.415608Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:07.416322Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-105" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-10-24T16:25:07.416347Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-105, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-10-24T16:25:07.416382Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose 
status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2025-10-24T16:25:07.416920Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 9 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-10-24T16:25:07.416977Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/export-105 2025-10-24T16:25:07.417004Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-10-24T16:25:07.417012Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 9 PathDropTxId: 281474976710761 2025-10-24T16:25:07.417017Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:07.417021Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:917: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-10-24T16:25:07.417026Z node 5 :EXPORT TRACE: schemeshard_export__create.cpp:918: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000010, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 9 PathDropTxId: 281474976710761 2025-10-24T16:25:07.417042Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:1099: TExport::TTxProgress: Wait for completion: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 9] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 
2025-10-24T16:25:07.417343Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:07.417365Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-10-24T16:25:07.417378Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-10-24T16:25:07.417386Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:25:07.417389Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:07.417392Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-10-24T16:25:07.417395Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-10-24T16:25:07.417706Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 105 2025-10-24T16:25:07.417768Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-10-24T16:25:07.417880Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-10-24T16:25:07.417985Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-10-24T16:25:07.418005Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:25:07.418010Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [5:1469:3248] TestWaitNotification: OK eventTxId 105 >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_secrets_usage.py::test_create_eds_with_single_secret_with_success [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2025-10-24T16:23:55.248811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824837258213733:2085];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:55.248986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000d30/r3tmp/tmpP5owAK/pdisk_1.dat 2025-10-24T16:23:55.372393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2025-10-24T16:23:55.438692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:55.438714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:55.531711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:55.544174Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:55.548010Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824837258213671:2081] 1761323035238891 != 1761323035238894 2025-10-24T16:23:55.548029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11656, node 1 2025-10-24T16:23:55.831084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:55.831099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:55.831101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:55.831144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:55.971783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.974350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-10-24T16:23:55.974890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:56.039544Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7564824841553181612:2295] 2025-10-24T16:23:56.042100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:23:56.087439Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:23:56.087462Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:23:56.087602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:23:56.087607Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:23:56.087611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:23:56.087661Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:23:56.087668Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:23:56.087675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7564824841553181627:2295] in generation 1 2025-10-24T16:23:56.091643Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:23:56.162348Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:23:56.162397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:23:56.162417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7564824841553181629:2296] 2025-10-24T16:23:56.162420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:56.162423Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:23:56.162426Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:56.162474Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:23:56.162491Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:23:56.162494Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:56.162498Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:56.162506Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:23:56.162508Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:56.166428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7564824837258214314:2309], serverId# [1:7564824841553181614:2310], sessionId# [0:0:0] 2025-10-24T16:23:56.166452Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:56.166508Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:23:56.166523Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:23:56.168549Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:56.176332Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:56.176358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state 
WaitScheme 2025-10-24T16:23:56.178235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7564824841553181643:2326], serverId# [1:7564824841553181644:2327], sessionId# [0:0:0] 2025-10-24T16:23:56.190576Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1761323036221 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1761323036221 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:56.190592Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:56.191377Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:56.191394Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:56.191400Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:56.191409Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1761323036221:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:56.191733Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1761323036221:281474976715657 keys extracted: 0 2025-10-24T16:23:56.192038Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:56.192068Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:56.192076Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:23:56.194116Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:23:56.200877Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:56.201366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1761323036220 2025-10-24T16:23:56.201371Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:56.201418Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1761323036221} 2025-10-24T16:23:56.201428Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:56.201437Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1761323036228 2025-10-24T16:23:56.207964Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:56.207975Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:56.207979Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:56.207995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1761323036221 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7564824837258214018:2144], exec latency: 2 ms, propose latency: 16 ms 2025-10-24T16:23:56.208007Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify ... ecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-10-24T16:25:08.460708Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:25:08.460721Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:25:08.460773Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 2025-10-24T16:25:08.460804Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:25:08.460809Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:25:08.460824Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2011: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-10-24T16:25:08.460838Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:637: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-10-24T16:25:08.460856Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.460866Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.460872Z node 30 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:25:08.460892Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1248: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-10-24T16:25:08.460902Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:08.460913Z node 30 :PERSQUEUE INFO: partition_write.cpp:1710: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-10-24T16:25:08.460931Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-10-24T16:25:08.460965Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 
count 1 nextOffset 4 batches 1 2025-10-24T16:25:08.461011Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1675: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 3 endOffset 3 curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000? size 93 WTime 7451 2025-10-24T16:25:08.461041Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:25:08.461059Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 93 2025-10-24T16:25:08.461302Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. Partition 0 offset 3 count 1 size 93 actorID [30:928:2707] 2025-10-24T16:25:08.461326Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' size 93 2025-10-24T16:25:08.461337Z node 30 :PERSQUEUE DEBUG: partition.cpp:2168: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-10-24T16:25:08.472104Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:490: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-10-24T16:25:08.472157Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-10-24T16:25:08.472180Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-10-24T16:25:08.472201Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-10-24T16:25:08.472237Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.472243Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.472248Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:08.472263Z node 30 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:25:08.472290Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-10-24T16:25:08.472381Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1061:2765] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-10-24T16:25:08.472404Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][30:979:2765] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { 
PartitionId: 0 } 2025-10-24T16:25:08.472448Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-10-24T16:25:08.472455Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-10-24T16:25:08.488198Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 2025-10-24T16:25:08.512112Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.512148Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.512155Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:08.561231Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.561262Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.561266Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:08.582739Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.582784Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.582789Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:08.603413Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.603448Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.603453Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:08.624140Z node 30 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:08.624167Z node 30 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:08.624171Z node 30 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-10-24T16:25:08.634789Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:25:08.634816Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:25:08.634879Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:883: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-10-24T16:25:08.634915Z node 30 
:PERSQUEUE DEBUG: partition_read.cpp:1097: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 2 blobs, size 452 count 4 last offset 3, current partition end offset: 4 2025-10-24T16:25:08.634921Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1123: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2025-10-24T16:25:08.634938Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 359 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2025-10-24T16:25:08.634945Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:07.000000Z 2025-10-24T16:25:08.634954Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 2 blobs are from cache. 2025-10-24T16:25:08.634979Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2025-10-24T16:25:08.634985Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:08.635001Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:581: FormAnswer for 2 blobs 2025-10-24T16:25:08.635034Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 0 totakecount 3 count 3 size 339 from pos 0 cbcount 3 2025-10-24T16:25:08.635051Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 3 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-10-24T16:25:08.635175Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2025-10-24T16:23:54.399591Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824832556319090:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:54.399614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/fxx3/000d48/r3tmp/tmprGVGun/pdisk_1.dat 2025-10-24T16:23:54.924251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:54.925116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:54.925133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:54.937498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:54.974337Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20131, node 1 2025-10-24T16:23:55.043262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:23:55.043281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:23:55.043283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:23:55.043336Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:55.061524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:55.064382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-10-24T16:23:55.065259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:55.071227Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7564824836851286758:2295] 2025-10-24T16:23:55.071325Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:23:55.074163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:23:55.074185Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:23:55.074335Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:23:55.074340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:23:55.074345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:23:55.074401Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:23:55.074407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:23:55.074416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7564824836851286775:2295] in generation 1 2025-10-24T16:23:55.077923Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:23:55.084377Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:23:55.084572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:23:55.084598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7564824836851286777:2296] 2025-10-24T16:23:55.084601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:55.084604Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:23:55.084607Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.084649Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:23:55.084669Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:23:55.084672Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:55.084677Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:55.084685Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:23:55.084688Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:55.084696Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7564824836851286760:2303], serverId# [1:7564824836851286766:2307], sessionId# [0:0:0] 2025-10-24T16:23:55.084844Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:23:55.084918Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:23:55.084938Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-10-24T16:23:55.085334Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:55.085942Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:23:55.085968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:23:55.087256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, 
clientId# [1:7564824836851286791:2320], serverId# [1:7564824836851286793:2322], sessionId# [0:0:0] 2025-10-24T16:23:55.088054Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1761323035136 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1761323035136 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:23:55.088062Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.088087Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:23:55.088097Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:55.088102Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:23:55.088109Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1761323035136:281474976710657] in PlanQueue unit at 72075186224037888 2025-10-24T16:23:55.088169Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1761323035136:281474976710657 keys extracted: 0 2025-10-24T16:23:55.088206Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:23:55.088221Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:23:55.088231Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:23:55.088629Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:23:55.088743Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:23:55.089998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1761323035135 2025-10-24T16:23:55.090009Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.090017Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1761323035136} 2025-10-24T16:23:55.090028Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:55.090035Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:23:55.090040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:23:55.090043Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:23:55.090057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1761323035136 : 281474976710657] from 72075186224037888 at tablet 
72075186224037888 send result to client [1:7564824832556319183:2140], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:23:55.090064Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-10-24T16:23:55.090071Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:23:55.090083Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1761323035136 2025-10-24T16:23:55.090317Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:7564824836851286777:2296][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-10-24T16:23:55.091814Z nod ... ds { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-10-24T16:25:09.765077Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:25:09.765092Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:25:09.765139Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2025-10-24T16:25:09.765168Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:25:09.765173Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:25:09.765189Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:2011: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-10-24T16:25:09.765203Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:637: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-10-24T16:25:09.765222Z node 29 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:09.765235Z node 29 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:09.765240Z node 29 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:25:09.765263Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1248: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-10-24T16:25:09.765272Z node 29 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:09.765284Z node 29 :PERSQUEUE INFO: partition_write.cpp:1710: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-10-24T16:25:09.765305Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' 
seqNo 0 partNo 0 2025-10-24T16:25:09.765356Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-10-24T16:25:09.765404Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:1675: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 5 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000? size 93 WTime 8979 2025-10-24T16:25:09.765436Z node 29 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:25:09.765451Z node 29 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2025-10-24T16:25:09.765705Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. Partition 0 offset 5 count 1 size 93 actorID [29:801:2637] 2025-10-24T16:25:09.765734Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' size 93 2025-10-24T16:25:09.765747Z node 29 :PERSQUEUE DEBUG: partition.cpp:2168: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-10-24T16:25:09.777018Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:490: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-10-24T16:25:09.777076Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-10-24T16:25:09.777106Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-10-24T16:25:09.777132Z node 29 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-10-24T16:25:09.777175Z node 29 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:09.777181Z node 29 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:09.777185Z node 29 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:25:09.777205Z node 29 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:25:09.777241Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-10-24T16:25:09.777361Z node 29 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][29:934:2681] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-10-24T16:25:09.777388Z node 29 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][29:848:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-10-24T16:25:09.777442Z node 29 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-10-24T16:25:09.777449Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-10-24T16:25:09.777676Z node 29 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-10-24T16:25:09.900524Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:25:09.900550Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-10-24T16:25:09.900636Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:883: [72075186224037889][Partition][0][StateIdle] read cookie 11 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-10-24T16:25:09.900736Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:1097: [72075186224037889][Partition][0][StateIdle] read cookie 11 added 6 blobs, size 763 count 6 last offset 5, current partition end offset: 6 2025-10-24T16:25:09.900744Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:1123: [72075186224037889][Partition][0][StateIdle] Reading cookie 11. Send blob request. 2025-10-24T16:25:09.900764Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. 
Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 93 accessed 6 times before, last time 1970-01-01T00:00:06.000000Z 2025-10-24T16:25:09.900771Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 1 partno 0 count 1 parts_count 0 source 1 size 174 accessed 3 times before, last time 1970-01-01T00:00:06.000000Z 2025-10-24T16:25:09.900776Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 93 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2025-10-24T16:25:09.900792Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-10-24T16:25:09.900798Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 4 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-10-24T16:25:09.900803Z node 29 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 5 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-10-24T16:25:09.900812Z node 29 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 11. All 6 blobs are from cache. 2025-10-24T16:25:09.900843Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:09.900848Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:09.900852Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:09.900856Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:09.900859Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:09.900864Z node 29 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:25:09.900884Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:581: FormAnswer for 6 blobs 2025-10-24T16:25:09.900907Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 0 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-10-24T16:25:09.900921Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 1 totakecount 1 count 1 size 154 from pos 0 cbcount 1 2025-10-24T16:25:09.900933Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 2 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-10-24T16:25:09.900940Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 3 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2025-10-24T16:25:09.900948Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 4 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2025-10-24T16:25:09.900955Z node 29 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 5 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-10-24T16:25:09.901000Z node 29 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> test_canonical_records.py::test_create_drop_and_alter_table |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[select-scalar_subquery-default.txt-Results] [GOOD] >> test.py::test[select-select_all-default.txt-Results] >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TExportToS3Tests::AutoDropping >> IndexBuildTestReboots::BaseCaseWithDataColumnsUniq [GOOD] |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |56.3%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |56.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |56.4%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> TExportToS3Tests::AutoDropping [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> IndexBuildTestReboots::BaseCaseUniq [GOOD] |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_dis_in_in-off-Results] |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test.py::test[join-bush_dis_in_in-off-Results] [SKIPPED] >> test.py::test[join-cbo_7tables_only_common_join--Results] [SKIPPED] >> test.py::test[join-equi_join_two_mult_keys--Results] |56.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |56.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:34.075345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:34.075374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:34.075380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:34.075389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:34.075396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:34.075401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:34.075410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:34.075425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:34.075559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:34.075627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:34.102952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:34.102983Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:34.128918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:34.128971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:34.129016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:34.130606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:34.130772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:34.130901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:34.130989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:34.131900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:34.131953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:34.132242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:34.132253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:34.132282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:34.132291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:34.132297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:34.132326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.133990Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:34.162467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:34.162560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.162619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:34.162627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:34.162695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:34.162710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:34.172131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:34.172178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:34.172241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.172255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:34.172261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:34.172268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:34.176197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.176225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:34.176234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:34.178299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.178319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:34.178328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-10-24T16:24:34.178336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:34.179103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:34.183149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:34.183228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:34.183467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:34.183500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:34.183511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:34.183591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:34.183599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:34.183630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:34.183643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:34.188148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:34.188162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
764613Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:11.764618Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-10-24T16:25:11.764623Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-10-24T16:25:11.765032Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:11.765051Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:25:11.765059Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:477:2436] TestWaitNotification: OK eventTxId 102 2025-10-24T16:25:11.765236Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:25:11.765263Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 38us result status StatusSuccess 2025-10-24T16:25:11.765412Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 desc: 1 2025-10-24T16:25:11.765482Z node 5 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-10-24T16:25:11.765823Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:11.765833Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:736: TExport::TTxProgress: Resume: id# 102 2025-10-24T16:25:11.765842Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:534: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-10-24T16:25:11.765850Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:11.765866Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-10-24T16:25:11.765871Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:11.765875Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:856: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2025-10-24T16:25:11.765882Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:526: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-10-24T16:25:11.765896Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:11.766652Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-10-24T16:25:11.766680Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-10-24T16:25:11.766718Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, at schemeshard: 72057594046678944 2025-10-24T16:25:11.767146Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 
72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-10-24T16:25:11.767188Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36, operation: DROP DIRECTORY, path: /MyRoot/export-102 2025-10-24T16:25:11.767215Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-10-24T16:25:11.767224Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-10-24T16:25:11.767231Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:11.767236Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:917: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-10-24T16:25:11.767244Z node 5 :EXPORT TRACE: schemeshard_export__create.cpp:918: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000007, drop txId: 281474976710761, source_location: ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:36" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 3 PathDropTxId: 281474976710761 2025-10-24T16:25:11.767268Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:1099: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-10-24T16:25:11.767567Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete 2025-10-24T16:25:11.767604Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-10-24T16:25:11.767621Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-10-24T16:25:11.767630Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:25:11.767635Z node 5 :EXPORT DEBUG: 
schemeshard_export__create.cpp:306: TExport::TTxProgress: DoExecute 2025-10-24T16:25:11.767639Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1236: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-10-24T16:25:11.767644Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1267: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-10-24T16:25:11.767949Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:326: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2025-10-24T16:25:11.768000Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:25:11.768019Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:25:11.768094Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:25:11.768107Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:25:11.768112Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:695:2649] TestWaitNotification: OK eventTxId 102 |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test.py::test[aggr_factory-median-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-mode-default.txt-Results] >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [GOOD] >> test.py::test[optimizers-yql_6179_merge_chunks_of_outputs--Results] [GOOD] >> test.py::test[order_by-SortByTwoFields--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003388/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk22/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit_log.m6cfnyfd.txt 2025-10-24T16:25:06.293806Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:06.293791Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 
5)","start_time":"2025-10-24T16:25:06.271956Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] >> test.py::test[select-select_all-default.txt-Results] [GOOD] >> test.py::test[select-select_all_ordered-default.txt-Results] |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_replace_config [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[order_by-SortByTwoFields--Results] [GOOD] >> test.py::test[order_by-SortByTwoFieldsDesc--Results] >> test.py::test[join-equi_join_two_mult_keys--Results] [GOOD] >> test.py::test[join-grace_join1-map-Results] [SKIPPED] >> test.py::test[join-join_comp_map_table-off-Results] [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test.py::test[select-select_all_ordered-default.txt-Results] [GOOD] >> test.py::test[select-tablename_with_table_row-default.txt-Results] >> test.py::test[join-join_table_conflict_fail--Results] |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |56.5%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |56.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test.py::test[join-join_table_conflict_fail--Results] [GOOD] >> test.py::test[join-join_without_correlation_and_dict_access-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names-off-Results] |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |56.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> test.py::test[join-join_without_correlation_names-off-Results] [SKIPPED] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::BaseCaseWithDataColumnsUniq [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.787546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.787564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.787568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: 
StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.787574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.787579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.787582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.787588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.787600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.787686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.787730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.802462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.802493Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.802575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.805291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.805369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.805402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.807321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.807410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.807499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.807552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.808269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.808303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.808488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.808495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.808516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.808521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.808526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.808543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.809622Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.823825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.823930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.823983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.823990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.824029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.824057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.824708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.824747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.824790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:20:33.824796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.824801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.824805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.825181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.825192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.825196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.825470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.825477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.825481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.825486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.825943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.826243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.826273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.826456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.826475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
nsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:25:11.244072Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:25:11.244115Z node 577 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1" took 44us result status StatusSuccess 2025-10-24T16:25:11.244286Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataColumnNames: "value" DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 
NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:25:11.244404Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:25:11.244451Z node 577 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1/indexImplTable" took 75us result status StatusSuccess 2025-10-24T16:25:11.244582Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] |56.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |56.6%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/py3test >> test_secrets_usage.py::test_create_eds_with_single_secret_with_success [GOOD] |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[order_by-SortByTwoFieldsDesc--Results] [GOOD] >> test.py::test[order_by-order_by_expr_mul_cols--Results] |56.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |56.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::BaseCaseUniq [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:20:33.343880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:20:33.343902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.343908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:20:33.343915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:20:33.343921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:20:33.343926Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:20:33.343935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:20:33.343949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:20:33.344063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:20:33.344113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:20:33.358366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:20:33.358392Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:33.358464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.360674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:20:33.360736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:20:33.360766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:20:33.362601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:20:33.362689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:20:33.362771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.362823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:20:33.363488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.363518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:20:33.363696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:20:33.363703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:20:33.363724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:20:33.363729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:20:33.363733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:20:33.363750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:20:33.364684Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:20:33.379499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:20:33.379595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.379659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:20:33.379669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:20:33.379728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:20:33.379765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:20:33.380435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.380475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:20:33.380520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.380527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:20:33.380531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:20:33.380535Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:20:33.380832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.380844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:20:33.380850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:20:33.381238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.381247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:20:33.381253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:20:33.381260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:20:33.381843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:20:33.382316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:20:33.382366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:20:33.382626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:20:33.382661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
Key { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:25:12.104142Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:25:12.104176Z node 577 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1" took 35us result status StatusSuccess 2025-10-24T16:25:12.104317Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobalUnique 
State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:25:12.104383Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:25:12.104411Z node 577 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1/indexImplTable" took 29us result status StatusSuccess 2025-10-24T16:25:12.104521Z node 577 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1/indexImplTable" 
PathDescription { Self { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] |56.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |56.6%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test.py::test[aggr_factory-mode-default.txt-Results] [GOOD] >> test.py::test[aggregate-avg_and_sum_float--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] |56.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |56.7%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003375/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit_log.0j0fs01l.txt 2025-10-24T16:25:12.629973Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n 
kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |56.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test.py::test[select-tablename_with_table_row-default.txt-Results] [GOOD] |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_secrets_usage.py::test_create_eds_with_many_secrets_with_success [GOOD] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |56.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00335d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit_log.7sjah63x.txt 2025-10-24T16:25:19.318570Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> test_auditlog.py::test_dml_begin_commit_logged >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003354/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit_log.ixn7090s.txt 2025-10-24T16:25:22.215809Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[order_by-order_by_expr_mul_cols--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_nested_left--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test.py::test[order_by-order_by_expr_simple--Results] |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-left_semi_with_other--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |56.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/ydb-library-yql-dq-comp_nodes-hash_join_utils-ut |56.9%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/ydb-library-yql-dq-comp_nodes-hash_join_utils-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[aggregate-avg_and_sum_float--Results] [GOOD] >> test.py::test[aggregate-compact_distinct--Results] >> test.py::test[aggregate-compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-count_distinct_with_filter--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |56.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/ut/ydb-library-yql-dq-comp_nodes-hash_join_utils-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003358/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit_log._qoviej8.txt 2025-10-24T16:25:20.688960Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:20.688943Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-10-24T16:25:20.663574Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:20.846977Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:20.846961Z","sanitized_token":"othe****ltin 
(27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-10-24T16:25:20.796269Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:20.997566Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:20.997551Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-10-24T16:25:20.954995Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:21.187873Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:21.187845Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-10-24T16:25:21.128504Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:21.311754Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:21.311742Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-10-24T16:25:21.294741Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:21.472903Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:21.472887Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-10-24T16:25:21.422733Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/py3test >> test_secrets_usage.py::test_create_eds_with_many_secrets_with_success [GOOD] |56.9%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] >> EraseRowsTests::EraseRowsShouldSuccess |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldNotErase |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> test.py::test[aggregate-count_distinct_with_filter--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_duo--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] |57.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |57.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |57.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> BasicUsage::RecreateObserver [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> 
DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-10-24T16:24:00.461466Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1761323040461458 2025-10-24T16:24:00.803815Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824858762418143:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.803838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:00.813664Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:00.815808Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824856356072793:2261];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:00.815916Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00151c/r3tmp/tmpbL4r3V/pdisk_1.dat 2025-10-24T16:24:00.852515Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:00.904004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:00.914231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:00.914257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:00.917882Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:24:00.918237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:00.922032Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:00.922373Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63234, node 1 2025-10-24T16:24:00.936374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:00.936402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:00.941334Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:00.996171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00151c/r3tmp/yandexswwgZT.tmp 2025-10-24T16:24:00.996187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00151c/r3tmp/yandexswwgZT.tmp 2025-10-24T16:24:01.008837Z INFO: TTestServer started on Port 29064 GrpcPort 63234 2025-10-24T16:24:01.012094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00151c/r3tmp/yandexswwgZT.tmp 2025-10-24T16:24:01.012203Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29064 PQClient connected to localhost:63234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:01.105925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 2025-10-24T16:24:01.138000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:24:01.561028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824863057386396:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.561082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.561525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824863057386410:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.561612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824863057386409:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.561620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:01.562539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:01.614832Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824863057386413:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-10-24T16:24:01.617823Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824860651040218:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:01.618449Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=YjI4MjQxOTUtZjI3ZWFiMzEtZTNmNjk0ZDctNjBkZWNlNjY=, ActorId: [2:7564824860651040201:2295], ActorState: ExecuteState, TraceId: 01k8bghgs4c8vcwsq15wzqmnjz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:01.618797Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:01.619310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:01.669074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:01.693340Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824863057386691:2804] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:01.699371Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824863057386701:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:01.700331Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NGY0MWMzMTItNjIxYWRiMWQtZDQ0MmU1OWMtNzRkMjhiNDc=, ActorId: [1:7564824863057386378:2322], ActorState: ExecuteState, TraceId: 01k8bghgrn9gcns06w838zcftd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:01.700492Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severit ... O: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] Closing read session. Close timeout: 0.000000s 2025-10-24T16:25:36.666706Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-10-24T16:25:36.666719Z :INFO: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:25:36.666723Z :NOTICE: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-10-24T16:25:36.666728Z :DEBUG: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] [] Abort session to cluster 2025-10-24T16:25:36.666744Z :INFO: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] Closing read session. Close timeout: 0.000000s 2025-10-24T16:25:36.666748Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-10-24T16:25:36.666751Z :INFO: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:25:36.666755Z :NOTICE: [/Root] [/Root] [b1f743d3-c3660243-b5073bf2-297266df] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:25:36.666780Z :INFO: [/Root] [/Root] [bcc51377-48d20de2-9391b296-68ead7a8] Closing read session. Close timeout: 0.000000s 2025-10-24T16:25:36.666782Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:25:36.666785Z :INFO: [/Root] [/Root] [bcc51377-48d20de2-9391b296-68ead7a8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 10 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:25:36.666788Z :NOTICE: [/Root] [/Root] [bcc51377-48d20de2-9391b296-68ead7a8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:25:36.666798Z :INFO: [/Root] [/Root] [28383ed1-b72b10f8-6e1661ba-fccc8b40] Closing read session. Close timeout: 0.000000s 2025-10-24T16:25:36.666801Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-10-24T16:25:36.666803Z :INFO: [/Root] [/Root] [28383ed1-b72b10f8-6e1661ba-fccc8b40] Counters: { Errors: 0 CurrentSessionLifetimeMs: 11 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-10-24T16:25:36.666807Z :NOTICE: [/Root] [/Root] [28383ed1-b72b10f8-6e1661ba-fccc8b40] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-10-24T16:25:36.666823Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_18314952962614490555_v1 grpc read done: success# 0, data# { } 2025-10-24T16:25:36.666833Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_18314952962614490555_v1 grpc read failed 2025-10-24T16:25:36.666839Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_18314952962614490555_v1 grpc closed 2025-10-24T16:25:36.666844Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_18314952962614490555_v1 is DEAD 2025-10-24T16:25:36.667128Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_6398036813865637442_v1 grpc read done: success# 0, data# { } 2025-10-24T16:25:36.667138Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_6398036813865637442_v1 grpc read failed 2025-10-24T16:25:36.667144Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_6398036813865637442_v1 grpc closed 2025-10-24T16:25:36.667149Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_6398036813865637442_v1 is DEAD 2025-10-24T16:25:36.667351Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830961:2531] disconnected. 2025-10-24T16:25:36.667366Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830961:2531] disconnected; active server actors: 1 2025-10-24T16:25:36.667370Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830961:2531] client user disconnected session shared/user_3_3_18314952962614490555_v1 2025-10-24T16:25:36.667382Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1186: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-10-24T16:25:36.667394Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1258: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-10-24T16:25:36.667404Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1325: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2025-10-24T16:25:36.667409Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1402: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000012s 2025-10-24T16:25:36.667443Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_5279574507530882510_v1 grpc read done: success# 0, data# { } 2025-10-24T16:25:36.667450Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_5279574507530882510_v1 grpc read failed 2025-10-24T16:25:36.667454Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer shared/user session shared/user_3_2_5279574507530882510_v1 grpc closed 2025-10-24T16:25:36.667460Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_5279574507530882510_v1 is DEAD 2025-10-24T16:25:36.667539Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830962:2529] disconnected. 2025-10-24T16:25:36.667552Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830962:2529] disconnected; active server actors: 1 2025-10-24T16:25:36.667554Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830962:2529] client user disconnected session shared/user_3_1_6398036813865637442_v1 2025-10-24T16:25:36.667560Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1186: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-10-24T16:25:36.667567Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1258: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-10-24T16:25:36.667570Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1325: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-10-24T16:25:36.667573Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1402: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000004s 2025-10-24T16:25:36.667711Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037892] Destroy direct read session shared/user_3_2_5279574507530882510_v1 2025-10-24T16:25:36.667724Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [3:7564825270325830967:2539] destroyed 2025-10-24T16:25:36.667739Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_2_5279574507530882510_v1 2025-10-24T16:25:36.667802Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830963:2530] disconnected. 
2025-10-24T16:25:36.667810Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830963:2530] disconnected; active server actors: 1 2025-10-24T16:25:36.667812Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [3:7564825270325830963:2530] client user disconnected session shared/user_3_2_5279574507530882510_v1 2025-10-24T16:25:36.677477Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:36.677493Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:36.677498Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:25:36.777859Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:36.777880Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:36.777883Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:25:36.878280Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:36.878296Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:36.878299Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:25:36.978553Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:36.978572Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:36.978585Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:25:37.078881Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:25:37.078898Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:25:37.078901Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-10-24T16:25:35.372871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:25:35.429048Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:35.429156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:35.429166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00178e/r3tmp/tmpj8nXHM/pdisk_1.dat 2025-10-24T16:25:35.515545Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:35.516634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:35.516676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:35.516812Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323134934455 != 1761323134934458 2025-10-24T16:25:35.547977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:35.596204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:25:35.629523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:35.712748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:25:35.727160Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:25:35.727231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:35.735564Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:35.735608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:35.735750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:25:35.735757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:25:35.735762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:25:35.735815Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:35.735863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:35.735877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:25:35.746191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:35.751037Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:25:35.751123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:35.751158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:25:35.751164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:35.751170Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:25:35.751175Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:35.751367Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:25:35.751395Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:25:35.751412Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:35.751420Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:35.751430Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:25:35.751435Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:35.751448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:25:35.751963Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:25:35.752039Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:25:35.752063Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:25:35.752480Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:35.762771Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:25:35.762814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:25:35.897157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:25:35.897875Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:25:35.897896Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:35.897959Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:35.897968Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:25:35.897978Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:25:35.898052Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:25:35.898086Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:25:35.898383Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:35.898404Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:25:35.898802Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:35.898891Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:35.899248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:25:35.899257Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:35.899297Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:25:35.899307Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:35.899561Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:35.899571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:35.899577Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:25:35.899592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:25:35.899601Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:25:35.899611Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:35.900795Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:35.901235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:25:35.901346Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:25:35.901356Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:25:35.903166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7 ... shard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:37.195641Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:25:37.195651Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.195744Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:25:37.195764Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:25:37.195773Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.195778Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:37.195785Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:25:37.195789Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.195891Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:671:2563], serverId# [2:677:2567], sessionId# [0:0:0] 2025-10-24T16:25:37.195913Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:25:37.195953Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:25:37.195966Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-10-24T16:25:37.196243Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:37.206547Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:25:37.206590Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:25:37.339636Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader 
tablet# 72075186224037888, clientId# [2:707:2585], serverId# [2:709:2587], sessionId# [0:0:0] 2025-10-24T16:25:37.339768Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:25:37.339777Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.339993Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.340006Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:25:37.340018Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976710657] in PlanQueue unit at 72075186224037888 2025-10-24T16:25:37.340078Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976710657 keys extracted: 0 2025-10-24T16:25:37.340103Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:25:37.340162Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.340176Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:25:37.340261Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:37.340345Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:37.340710Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:25:37.340722Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.340841Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:25:37.340852Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.341049Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.341060Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:37.341064Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:25:37.341076Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:25:37.341083Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 
2025-10-24T16:25:37.341091Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.341334Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:37.341563Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:25:37.341600Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-10-24T16:25:37.341607Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:25:37.342450Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:743:2613], serverId# [2:744:2614], sessionId# [0:0:0] 2025-10-24T16:25:37.342480Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.362942Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.362971Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.363052Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:743:2613], serverId# [2:744:2614], sessionId# [0:0:0] 2025-10-24T16:25:37.363511Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:749:2619], serverId# [2:750:2620], sessionId# [0:0:0] 2025-10-24T16:25:37.363544Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.363588Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.363594Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.363622Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:749:2619], serverId# [2:750:2620], sessionId# [0:0:0] 2025-10-24T16:25:37.363877Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:754:2624], serverId# [2:755:2625], sessionId# [0:0:0] 2025-10-24T16:25:37.363899Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.363918Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.363923Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.363947Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:754:2624], serverId# [2:755:2625], sessionId# [0:0:0] 2025-10-24T16:25:37.364163Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:759:2629], serverId# [2:760:2630], 
sessionId# [0:0:0] 2025-10-24T16:25:37.364180Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.364196Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.364201Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.364225Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:759:2629], serverId# [2:760:2630], sessionId# [0:0:0] 2025-10-24T16:25:37.364416Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:764:2634], serverId# [2:765:2635], sessionId# [0:0:0] 2025-10-24T16:25:37.364433Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.364467Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.364472Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.364497Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:764:2634], serverId# [2:765:2635], sessionId# [0:0:0] 2025-10-24T16:25:37.364796Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:769:2639], serverId# [2:770:2640], sessionId# [0:0:0] 2025-10-24T16:25:37.364815Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.364834Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.364839Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.364861Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:769:2639], serverId# [2:770:2640], sessionId# [0:0:0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2025-10-24T16:25:35.649167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:25:35.704865Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:35.704978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:35.704991Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00178d/r3tmp/tmpnGuJxm/pdisk_1.dat 2025-10-24T16:25:35.793082Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:35.794419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:35.794463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:35.794625Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323135212192 != 1761323135212195 2025-10-24T16:25:35.832481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:35.883164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:25:35.918454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:36.008475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:25:36.024728Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:25:36.024822Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:36.033385Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:36.033441Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:36.033599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:25:36.033610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:25:36.033618Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:25:36.033698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:36.033730Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:36.033746Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:25:36.045268Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:36.050547Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:25:36.050674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:36.050721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:25:36.050727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:36.050733Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:25:36.050741Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.050979Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:25:36.051015Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:25:36.051034Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.051043Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:36.051055Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:25:36.051060Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.051074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:25:36.051222Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:25:36.051305Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:25:36.051330Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:25:36.051734Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:36.062062Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:25:36.062122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:25:36.201120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:25:36.202134Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:25:36.202162Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.202239Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.202249Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:25:36.202261Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:25:36.202366Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:25:36.202405Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:25:36.202760Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.202782Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:25:36.203227Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:36.203335Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:36.203727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:25:36.203738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.203786Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:25:36.203798Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.204160Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.204174Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:36.204181Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:25:36.204199Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:25:36.204210Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:25:36.204224Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.205094Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:36.205583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:25:36.205714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:25:36.205724Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:25:36.207441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7 ... immediate 0 planned 1 2025-10-24T16:25:37.569296Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.569310Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:25:37.569399Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:37.569497Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:37.569867Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:25:37.569879Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.569991Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:25:37.570001Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.570214Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.570224Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:37.570230Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:25:37.570245Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:25:37.570254Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-10-24T16:25:37.570265Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.570523Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:37.570777Z node 2 :TX_DATASHARD 
DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:25:37.570809Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-10-24T16:25:37.570815Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:25:37.572389Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:37.572409Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:751:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:37.572419Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:37.572550Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:37.572574Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:37.573447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:25:37.574469Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:37.615813Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:25:37.709691Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:37.710180Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:25:37.741811Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:827:2660] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:25:37.754138Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710660. Ctx: { TraceId: 01k8bgmeh40zr2685aepnf5fe2, Database: , SessionId: ydb://session/3?node_id=2&id=MjRmMmMzOTgtNDkwYjRjODgtNmU1Nzk0NGEtZmYyNDUwZmI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:25:37.754701Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:858:2677], serverId# [2:859:2678], sessionId# [0:0:0] 2025-10-24T16:25:37.754816Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:25:37.754864Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-10-24T16:25:37.765224Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.766698Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:866:2684], serverId# [2:867:2685], sessionId# [0:0:0] 2025-10-24T16:25:37.766971Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-10-24T16:25:37.777373Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-10-24T16:25:37.777405Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:37.777479Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:25:37.777489Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-10-24T16:25:37.777565Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.777586Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:37.777596Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:25:37.777611Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.777630Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:866:2684], serverId# [2:867:2685], sessionId# [0:0:0] 2025-10-24T16:25:37.777854Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:25:37.777940Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:25:37.777975Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.777980Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:25:37.777988Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 2025-10-24T16:25:37.778029Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:25:37.778036Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.778182Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 1 2025-10-24T16:25:37.778242Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976710661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-10-24T16:25:37.778262Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976710661, PendingAcks: 0 2025-10-24T16:25:37.778268Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976710661, MessageQuota: 0 2025-10-24T16:25:37.778326Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-10-24T16:25:37.778333Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710661, at: 72075186224037888 2025-10-24T16:25:37.778354Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:37.778359Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:25:37.778365Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for ReadTableScan 2025-10-24T16:25:37.778396Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:37.778405Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:37.778414Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test.py::test[order_by-order_by_tablepath_column--Results] [GOOD] >> test.py::test[order_by-ordered_fill--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-10-24T16:25:36.365833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:25:36.410201Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:36.410308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:36.410317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001777/r3tmp/tmpT0P9zS/pdisk_1.dat 2025-10-24T16:25:36.490598Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:36.491259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:36.491283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:36.491364Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323135936779 != 1761323135936782 2025-10-24T16:25:36.522279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:36.570076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:25:36.613731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:36.687239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:25:36.702533Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:25:36.702598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:36.711613Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:36.711662Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:36.711840Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:25:36.711882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:25:36.711891Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:25:36.711968Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:36.712006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:36.712022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:25:36.722499Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:36.726514Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:25:36.726581Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:36.726624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:25:36.726628Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:36.726631Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:25:36.726634Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.726754Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:25:36.726770Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:25:36.726784Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.726789Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:36.726795Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:25:36.726800Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.726810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:25:36.726890Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:25:36.726927Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:25:36.726941Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:25:36.727183Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:36.737471Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:25:36.737519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:25:36.871023Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:25:36.872018Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:25:36.872043Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.872115Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.872123Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:25:36.872134Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:25:36.872214Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:25:36.872249Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:25:36.872533Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.872554Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:25:36.872988Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:36.873084Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:36.873444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:25:36.873455Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.873499Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:25:36.873509Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.873795Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.873809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:36.873815Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:25:36.873832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:25:36.873841Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:25:36.873854Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.874772Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:36.875198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:25:36.875312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:25:36.875323Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-10-24T16:25:36.877399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7 ... main_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:25:39.034449Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-10-24T16:25:39.034466Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-10-24T16:25:39.066700Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1256:3026] 2025-10-24T16:25:39.066783Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:39.068316Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:39.068345Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:39.068490Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-10-24T16:25:39.068499Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2025-10-24T16:25:39.068526Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2025-10-24T16:25:39.068569Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:39.068590Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:39.068602Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [2:1272:3026] in generation 1 2025-10-24T16:25:39.090718Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:39.090748Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037894 2025-10-24T16:25:39.090775Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:39.090788Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037894, actorId: [2:1274:3036] 2025-10-24T16:25:39.090793Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2025-10-24T16:25:39.090799Z node 2 :TX_DATASHARD 
INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-10-24T16:25:39.090804Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-10-24T16:25:39.090916Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037894 2025-10-24T16:25:39.090938Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-10-24T16:25:39.090956Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2025-10-24T16:25:39.090962Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:39.090970Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037894 TxInFly 0 2025-10-24T16:25:39.090976Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2025-10-24T16:25:39.091057Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1255:3025], serverId# [2:1263:3030], sessionId# [0:0:0] 2025-10-24T16:25:39.091081Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-10-24T16:25:39.091132Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037894 txId 281474976710663 ssId 72057594046644480 seqNo 2:7 2025-10-24T16:25:39.091150Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710663 at tablet 72075186224037894 2025-10-24T16:25:39.091261Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-10-24T16:25:39.101533Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-10-24T16:25:39.101575Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037894 not sending time cast registration request in state WaitScheme 2025-10-24T16:25:39.224696Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1280:3042], serverId# [2:1282:3044], sessionId# [0:0:0] 2025-10-24T16:25:39.224804Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976710663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-10-24T16:25:39.224812Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-10-24T16:25:39.224873Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2025-10-24T16:25:39.224881Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:25:39.224887Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [4000:281474976710663] in PlanQueue unit at 72075186224037894 2025-10-24T16:25:39.224957Z node 2 :TX_DATASHARD 
DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976710663 keys extracted: 0 2025-10-24T16:25:39.224982Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:25:39.225004Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2025-10-24T16:25:39.225017Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-10-24T16:25:39.225112Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:39.225187Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:39.225595Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-10-24T16:25:39.225605Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-10-24T16:25:39.225673Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-10-24T16:25:39.225681Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2025-10-24T16:25:39.225873Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2025-10-24T16:25:39.225881Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2025-10-24T16:25:39.225885Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037894 2025-10-24T16:25:39.225896Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4000 : 281474976710663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:25:39.225902Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976710663 state Ready TxInFly 0 2025-10-24T16:25:39.225910Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-10-24T16:25:39.226060Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-10-24T16:25:39.226071Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-10-24T16:25:39.226092Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-10-24T16:25:39.226104Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-10-24T16:25:39.226113Z node 2 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:25:39.226124Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-10-24T16:25:39.226169Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-10-24T16:25:39.226283Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-10-24T16:25:39.226377Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710663 datashard 72075186224037894 state Ready 2025-10-24T16:25:39.226382Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-10-24T16:25:39.227000Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1309:3065], serverId# [2:1310:3066], sessionId# [0:0:0] 2025-10-24T16:25:39.227035Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1309:3065], serverId# [2:1310:3066], sessionId# [0:0:0] 2025-10-24T16:25:39.227181Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1314:3070], serverId# [2:1315:3071], sessionId# [0:0:0] 2025-10-24T16:25:39.227201Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1314:3070], serverId# [2:1315:3071], sessionId# [0:0:0] 2025-10-24T16:25:39.227344Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1319:3075], serverId# [2:1320:3076], sessionId# [0:0:0] 2025-10-24T16:25:39.227364Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1319:3075], serverId# [2:1320:3076], sessionId# [0:0:0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2025-10-24T16:25:36.206017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:25:36.248519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:25:36.248611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:25:36.248621Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00178c/r3tmp/tmpnXbfjn/pdisk_1.dat 2025-10-24T16:25:36.322661Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:36.323653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:36.323685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:36.323804Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323135783900 != 1761323135783903 2025-10-24T16:25:36.354954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:25:36.403048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:25:36.435916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:36.519388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:25:36.536489Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:696:2582] 2025-10-24T16:25:36.536598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:36.546102Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:36.546144Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:36.546301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:25:36.546312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:25:36.546320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:25:36.546379Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:36.546450Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:701:2585] 2025-10-24T16:25:36.546473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:36.547489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:36.547505Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:731:2582] in generation 1 2025-10-24T16:25:36.547790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:36.547816Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:36.547966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:25:36.547975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:25:36.547981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:25:36.548015Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:36.548062Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:705:2588] 2025-10-24T16:25:36.548091Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:25:36.549121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:36.549137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:739:2585] in generation 1 2025-10-24T16:25:36.549265Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:25:36.549281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:25:36.549392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-10-24T16:25:36.549400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-10-24T16:25:36.549406Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-10-24T16:25:36.549437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:25:36.549452Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:25:36.549458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:745:2588] in generation 1 2025-10-24T16:25:36.559806Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:36.564817Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:25:36.564878Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:36.564905Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:749:2613] 2025-10-24T16:25:36.564911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:25:36.564916Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate 
change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:25:36.564922Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:25:36.565011Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:36.565018Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:25:36.565027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:36.565034Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:750:2614] 2025-10-24T16:25:36.565038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:25:36.565042Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:25:36.565045Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:25:36.565110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:25:36.565115Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-10-24T16:25:36.565123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:25:36.565131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:751:2615] 2025-10-24T16:25:36.565134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-10-24T16:25:36.565138Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-10-24T16:25:36.565141Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:25:36.565184Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:25:36.565204Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:25:36.565235Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:25:36.565243Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:36.565251Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:25:36.565256Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:25:36.565264Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:25:36.565273Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:25:36.565360Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: 
Server connected at leader tablet# 72075186224037888, clientId# [1:687:2577], serverId# [1:698:2583], sessionId# [0:0:0] 2025-10-24T16:25:36.565367Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:25:36.565371Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:36.565375Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:25:36.565380Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:25:36.565386Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2025-10-24T16:25:36.565394Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxChec ... node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-10-24T16:25:40.197683Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-10-24T16:25:40.197814Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-10-24T16:25:40.197820Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2025-10-24T16:25:40.197935Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-10-24T16:25:40.197941Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-10-24T16:25:40.198000Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:256: 72075186224037889 snapshot complete for split OpId 281474976715663 2025-10-24T16:25:40.198045Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2025-10-24T16:25:40.198054Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2025-10-24T16:25:40.198058Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2025-10-24T16:25:40.198063Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2025-10-24T16:25:40.203713Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2025-10-24T16:25:40.203900Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2025-10-24T16:25:40.203910Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 
snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2025-10-24T16:25:40.203916Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2025-10-24T16:25:40.203921Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2025-10-24T16:25:40.203946Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2025-10-24T16:25:40.204151Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:424: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2025-10-24T16:25:40.204180Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2365: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-10-24T16:25:40.204194Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2365: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-10-24T16:25:40.204267Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [3:1200:2901], serverId# [3:1201:2902], sessionId# [0:0:0] 2025-10-24T16:25:40.204278Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037892, clientId# [3:1199:2900], serverId# [3:1202:2903], sessionId# [0:0:0] 2025-10-24T16:25:40.204305Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-10-24T16:25:40.204400Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-10-24T16:25:40.204730Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037891 ack snapshot OpId 281474976715663 2025-10-24T16:25:40.204762Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2025-10-24T16:25:40.204778Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:40.204799Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-10-24T16:25:40.204812Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [3:1205:2906] 2025-10-24T16:25:40.204816Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-10-24T16:25:40.204822Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2025-10-24T16:25:40.204827Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-10-24T16:25:40.204899Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2025-10-24T16:25:40.204996Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 
2025-10-24T16:25:40.205002Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-10-24T16:25:40.205058Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2025-10-24T16:25:40.205067Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:40.205075Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-10-24T16:25:40.205081Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-10-24T16:25:40.205125Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1200:2901], serverId# [3:1201:2902], sessionId# [0:0:0] 2025-10-24T16:25:40.205138Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037892 ack snapshot OpId 281474976715663 2025-10-24T16:25:40.205155Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2025-10-24T16:25:40.205167Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:25:40.205176Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2025-10-24T16:25:40.205183Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037892, actorId: [3:1207:2908] 2025-10-24T16:25:40.205186Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037892 2025-10-24T16:25:40.205191Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2025-10-24T16:25:40.205195Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-10-24T16:25:40.205219Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2025-10-24T16:25:40.205328Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-10-24T16:25:40.205334Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-10-24T16:25:40.205364Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2025-10-24T16:25:40.205371Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-10-24T16:25:40.205381Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-10-24T16:25:40.205385Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:25:40.205390Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2025-10-24T16:25:40.205394Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037892 2025-10-24T16:25:40.205403Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1199:2900], serverId# [3:1202:2903], sessionId# [0:0:0] 2025-10-24T16:25:40.205457Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-10-24T16:25:40.205462Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-10-24T16:25:40.226098Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715663 2025-10-24T16:25:40.226951Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-10-24T16:25:40.227494Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2025-10-24T16:25:40.227509Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-10-24T16:25:40.227600Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:25:40.227606Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037889 state 5 2025-10-24T16:25:40.227633Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1093:2821], serverId# [3:1094:2822], sessionId# [0:0:0] 2025-10-24T16:25:40.227666Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2025-10-24T16:25:40.227677Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-10-24T16:25:40.227684Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_with_view--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |57.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |57.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test.py::test[join-mapjoin_dup_key--Results] |57.1%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk >> test.py::test[aggregate-group_by_gs_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_hop_list_key--Results] >> test.py::test[aggregate-group_by_hop_list_key--Results] [SKIPPED] >> test.py::test[aggregate-group_by_hop_only--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test.py::test[aggregate-group_by_hop_only--Results] [SKIPPED] >> test.py::test[aggregate-group_by_tz_date--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test.py::test[order_by-ordered_fill--Results] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( 
ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] |57.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] |57.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |57.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |57.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00332f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit_log.j3jki6p_.txt 2025-10-24T16:25:38.186156Z: {"tx_id":"01k8bgmf4a7d2v0wbfqfv26pen","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:38.186146Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-10-24T16:25:38.185874Z","grpc_method":"Ydb.Table.V1.TableService/BeginTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-10-24T16:25:38.212105Z: {"tx_id":"01k8bgmf4a7d2v0wbfqfv26pen","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:38.212090Z","sanitized_token":"**** 
(B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-10-24T16:25:38.189203Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:38.234016Z: {"tx_id":"01k8bgmf4a7d2v0wbfqfv26pen","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:38.234005Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-10-24T16:25:38.232591Z","grpc_method":"Ydb.Table.V1.TableService/CommitTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[aggregate-group_by_tz_date--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test.py::test[pg-aggregate_combine_all--Results] >> test.py::test[table_range-concat_with_view--Results] [GOOD] >> test.py::test[table_range-each_with_non_existing_all_fail--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_canonical_records.py::test_dml |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[table_range-each_with_non_existing_all_fail--Results] [GOOD] >> test.py::test[table_range-partition_list--Results] >> test.py::test[table_range-partition_list--Results] [SKIPPED] >> test.py::test[table_range-range_over_filter--Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_canonical_records.py::test_execute_minikql |57.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |57.3%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] |57.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |57.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test.py::test[pg-aggregate_combine_all--Results] [GOOD] >> test.py::test[pg-pg_types_orderby--Results] [SKIPPED] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test.py::test[table_range-range_over_filter--Results] [GOOD] >> test.py::test[tpch-q14-default.txt-Results] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test.py::test[aggregate-group_by_with_where-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1024,0,0,0] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::EmptyVariants[2,true,1024,0,0,0] |57.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> KqpOlapJson::FilterVariantsCount[2,false,1024,0,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1024,0,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:24:36.668325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:24:36.668626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:36.668632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:24:36.668640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:24:36.668647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:24:36.668651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:24:36.668661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:24:36.668675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:24:36.669081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:24:36.670163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:24:37.106360Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:24:37.106391Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:37.110147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:24:37.110176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:24:37.110210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:24:37.116566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:24:37.116719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:24:37.116831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:37.116903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:24:37.117660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:37.117701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:24:37.117932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:37.117940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:24:37.117965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:24:37.117972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:24:37.117978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:24:37.118000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:24:37.122500Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:24:37.257213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:24:37.257583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:37.257646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:24:37.257655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:24:37.269249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:24:37.270005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:24:37.293259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:37.293902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:24:37.294254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:37.294267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:24:37.294272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:24:37.294277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:24:37.304605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:37.304631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:24:37.304640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:24:37.308439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:37.308461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:24:37.308469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:37.308477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:24:37.312309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:24:37.319271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:24:37.320279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:24:37.320519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:24:37.320572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:24:37.320581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:37.320656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:24:37.320666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:24:37.320698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:24:37.320710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:24:37.321174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:24:37.321182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
unt reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:25:53.793192Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.793199Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.793201Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-10-24T16:25:53.793204Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-10-24T16:25:53.793206Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:25:53.793212Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-10-24T16:25:53.793663Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-10-24T16:25:53.793671Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-10-24T16:25:53.793676Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-10-24T16:25:53.793835Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-10-24T16:25:53.793856Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-10-24T16:25:53.793922Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:25:53.793937Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 21474838641 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:25:53.793942Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-10-24T16:25:53.793960Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-10-24T16:25:53.793967Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710761:0 progress is 1/1 2025-10-24T16:25:53.793972Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:25:53.793976Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710761:0 progress is 1/1 2025-10-24T16:25:53.793978Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:25:53.793984Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:25:53.793991Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:25:53.793994Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-10-24T16:25:53.793999Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-10-24T16:25:53.794002Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710761:0 2025-10-24T16:25:53.794004Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710761:0 2025-10-24T16:25:53.794010Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:25:53.794015Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-10-24T16:25:53.794018Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-10-24T16:25:53.794021Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-10-24T16:25:53.794119Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.794134Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.794541Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:25:53.794556Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:25:53.794584Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-10-24T16:25:53.794607Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:25:53.794611Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-10-24T16:25:53.794614Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:213:2213], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-10-24T16:25:53.794741Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.794750Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.794755Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-10-24T16:25:53.794761Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-10-24T16:25:53.794766Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:25:53.794834Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.794842Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.794844Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-10-24T16:25:53.794847Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-10-24T16:25:53.794850Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:25:53.794855Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 
72057594046678944, txId: 281474976710761, subscribers: 1 2025-10-24T16:25:53.794859Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:129:2153] 2025-10-24T16:25:53.795450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.795504Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-10-24T16:25:53.795515Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-10-24T16:25:53.795523Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710761 2025-10-24T16:25:53.795912Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:25:53.795924Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:602:2553] TestWaitNotification: OK eventTxId 102 |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |57.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] |57.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> KqpOlapJson::FilterVariantsCount[2,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,0,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> KqpOlapJson::EmptyVariants[2,true,1024,0,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,0,0,0.5] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,100,0] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, 
use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> test.py::test[pg-select_subquery2_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_yql_type--Results] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_secrets_usage.py::test_create_eds_with_old_secret_with_success >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,100,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,100,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] >> test.py::test[tpch-q14-default.txt-Results] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,0,0,0.5] [GOOD] >> test.py::test[tpch-q2-default.txt-Results] >> KqpOlapJson::EmptyVariants[2,true,1024,0,100,0] |57.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |57.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |57.5%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |57.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,1000000,0] |57.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |57.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> test_auditlog.py::test_single_dml_query_logged[update] |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,1000000,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,1000000,0.5] |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,0,100,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,0,100,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] |57.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,1000000,0.5] [GOOD] |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |57.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: 
DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_auditlog.py::test_single_dml_query_logged[replace] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> KqpOlapJson::EmptyVariants[2,true,1024,0,100,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,0,1000000,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[2,false,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 2016, MsgBus: 20585 2025-10-24T16:25:53.918709Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825341733325270:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:25:53.919010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001268/r3tmp/tmpNeahW9/pdisk_1.dat 2025-10-24T16:25:53.968722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:25:53.978504Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825341733325242:2081] 1761323153918458 != 1761323153918461 2025-10-24T16:25:53.981717Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2016, node 1 2025-10-24T16:25:53.987131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:25:53.987140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:25:53.987142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:25:53.987180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20585 2025-10-24T16:25:54.022354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:54.022386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:54.023415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:20585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:25:54.042428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:25:54.147885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:54.273679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825346028293218:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.273701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.273773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825346028293228:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.273786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.311349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:25:54.329997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:25:54.330055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:25:54.330063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:25:54.330077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:25:54.330120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:25:54.330123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:25:54.330159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:25:54.330163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:25:54.330185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:25:54.330185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:25:54.330209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:25:54.330212Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:25:54.330240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:25:54.330240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:25:54.330263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:25:54.330269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:25:54.330288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:25:54.330292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:25:54.330321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:25:54.330325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:25:54.330362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346028293289:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:25:54.330362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346028293293:2324];tablet_id=72075186224037888;process=TTxI ... 
186224037889;self_id=[7:7564825367649256546:2324];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710658;this=89392104728256;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323159678;max=18446744073709551615;plan=0;src=[7:7564825367649256165:2144];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.679306Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825367649256545:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=89392097114288;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323159679;max=18446744073709551615;plan=0;src=[7:7564825367649256165:2144];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.681236Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.681260Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.681262Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.681967Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.682044Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:25:59.682052Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:25:59.687759Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256648:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.687784Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.687863Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256650:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.687876Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.691468Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:25:59.694185Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:25:59.694186Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:25:59.698949Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256684:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.698977Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.699019Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256686:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.699031Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.703578Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:25:59.707967Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:25:59.707964Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:25:59.713979Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256721:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.714013Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.714034Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256726:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.714049Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825367649256728:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.714063Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:59.714812Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:25:59.722954Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825367649256730:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:25:59.783224Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825367649256781:2468] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:2/0/0 HEADER:0/0/0 |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> test.py::test[binding-table_range_binding-default.txt-Results] |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::EmptyVariants[2,true,1024,0,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,0,1000000,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> KqpOlapJson::SimpleExistsVariants[2,false,0,0,0,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,0,0] |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpOlapJson::EmptyVariants[2,true,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,1000000,0] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q19-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Query [GOOD] Test command err: 2025-10-24T16:20:41.210371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824001902969849:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:41.210395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:41.225537Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0013ca/r3tmp/tmpbDlGY9/pdisk_1.dat 2025-10-24T16:20:41.284263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:41.299541Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564824001902969823:2081] 1761322841209576 != 1761322841209579 2025-10-24T16:20:41.303135Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7634, node 1 2025-10-24T16:20:41.312068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/0013ca/r3tmp/yandexvALjjE.tmp 2025-10-24T16:20:41.312084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/0013ca/r3tmp/yandexvALjjE.tmp 2025-10-24T16:20:41.312192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/0013ca/r3tmp/yandexvALjjE.tmp 2025-10-24T16:20:41.312240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:41.318932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:41.318961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-10-24T16:20:41.319869Z INFO: TTestServer started on Port 4843 GrpcPort 7634 2025-10-24T16:20:41.320416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4843 PQClient connected to localhost:7634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:20:41.357122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-10-24T16:20:41.362968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 2025-10-24T16:20:41.483445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:41.627118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001902970664:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.627136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001902970653:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.627158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.627227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824001902970668:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.627247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.627885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:41.664991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-10-24T16:20:41.665065Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824001902970667:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:41.667493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.673998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.690729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-10-24T16:20:41.720522Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824001902970975:2594] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7564824001902971013:2620] 2025-10-24T16:20:42.215312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:46.210447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564824001902969849:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:46.210504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:46.905991Z :Write_Only_Big_Messages_In_Wide_Transactions_Query INFO: TTopicSdkTestSetup started 2025-10-24T16:20:46.908561Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:46.912642Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824023377807737:2725] connected; active server actors: 1 2025-10-24T16:20:46.912729Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:46.912867Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:46.912905Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-10-24T16:20:46.914056Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:20:46.914186Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:20:46.914223Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-10-24T16:20:46.914233Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:20:46.914244Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72075186224037892] doesn't have tx writes info 2025-10-24T16:20:46.914275Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:404: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-10-24T16:20:46.914287Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [ ... 86224037897][Partition][{0, {1, 281474976715700}, 100015}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421913Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715700}, 100015}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421915Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715700}, 100015}][StateIdle] Try persist 2025-10-24T16:23:33.421920Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715693}, 100011}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421921Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715693}, 100011}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421922Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715693}, 100011}][StateIdle] Try persist 2025-10-24T16:23:33.421928Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715697}, 100013}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421929Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715697}, 100013}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421930Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715697}, 100013}][StateIdle] Try persist 2025-10-24T16:23:33.421936Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421937Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421938Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][0][StateIdle] Try 
persist 2025-10-24T16:23:33.421945Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715728}, 100026}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421947Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715728}, 100026}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421948Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715728}, 100026}][StateIdle] Try persist 2025-10-24T16:23:33.421954Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715683}, 100004}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421955Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715683}, 100004}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421956Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715683}, 100004}][StateIdle] Try persist 2025-10-24T16:23:33.421960Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715692}, 100010}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421962Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715692}, 100010}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421963Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715692}, 100010}][StateIdle] Try persist 2025-10-24T16:23:33.421968Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715702}, 100016}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421969Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715702}, 100016}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421970Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715702}, 100016}][StateIdle] Try persist 2025-10-24T16:23:33.421975Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715680}, 100002}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421976Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715680}, 100002}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421977Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715680}, 100002}][StateIdle] Try persist 2025-10-24T16:23:33.421982Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715772}, 100050}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421983Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715772}, 100050}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421985Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715772}, 100050}][StateIdle] Try persist 2025-10-24T16:23:33.421990Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 
281474976715787}, 100060}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421991Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715787}, 100060}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421992Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715787}, 100060}][StateIdle] Try persist 2025-10-24T16:23:33.421997Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715817}, 100077}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.421998Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715817}, 100077}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.421999Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715817}, 100077}][StateIdle] Try persist 2025-10-24T16:23:33.422004Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715825}, 100082}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422005Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715825}, 100082}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422006Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715825}, 100082}][StateIdle] Try persist 2025-10-24T16:23:33.422011Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715823}, 100081}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422012Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715823}, 100081}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422013Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715823}, 100081}][StateIdle] Try persist 2025-10-24T16:23:33.422018Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715819}, 100078}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422019Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715819}, 100078}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422020Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715819}, 100078}][StateIdle] Try persist 2025-10-24T16:23:33.422026Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715762}, 100043}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422027Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715762}, 100043}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422028Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715762}, 100043}][StateIdle] Try persist 2025-10-24T16:23:33.422034Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715748}, 100034}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422035Z node 1 
:PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715748}, 100034}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422036Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715748}, 100034}][StateIdle] Try persist 2025-10-24T16:23:33.422041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715763}, 100044}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422042Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715763}, 100044}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422043Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715763}, 100044}][StateIdle] Try persist 2025-10-24T16:23:33.422048Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715779}, 100055}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422049Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715779}, 100055}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422050Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715779}, 100055}][StateIdle] Try persist 2025-10-24T16:23:33.422055Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037897][Partition][{0, {1, 281474976715798}, 100066}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:33.422056Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037897][Partition][{0, {1, 281474976715798}, 100066}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:33.422057Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037897][Partition][{0, {1, 281474976715798}, 100066}][StateIdle] Try persist 2025-10-24T16:23:33.422084Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037897][Partition][{0, {1, 281474976715750}, 100035}][StateIdle] No data for blobs compaction 2025-10-24T16:23:33.422122Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037897][Partition][{0, {1, 281474976715705}, 100018}][StateIdle] No data for blobs compaction 2025-10-24T16:23:33.422294Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037898][Partition][{4, {1, 281474976715880}, 100096}][StateIdle] No data for blobs compaction 2025-10-24T16:23:33.422357Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037898][Partition][{4, {1, 281474976715733}, 100027}][StateIdle] No data for blobs compaction |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,100,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,100,0.5] |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] |57.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |57.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |57.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test.py::test[binding-table_range_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] |57.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0032f4/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit_log.d0uvc5g6.txt 2025-10-24T16:25:55.755526Z: {"database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:55.755515Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-10-24T16:25:55.751767Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:55.889264Z: {"database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:55.889251Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-10-24T16:25:55.883888Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:56.008129Z: {"database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:56.008120Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from 
`/Root/test_auditlog.py/test-table`","start_time":"2025-10-24T16:25:56.004959Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:56.122828Z: {"database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:56.122805Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-10-24T16:25:56.117579Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:56.245779Z: {"database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:56.245758Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-10-24T16:25:56.241669Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-10-24T16:25:56.363626Z: {"database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:25:56.363613Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-10-24T16:25:56.359621Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_canonical_records.py::test_kill_tablet_using_developer_ui >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,1000000,0] |57.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |57.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1,1000,1000000,0] [GOOD] |57.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test_secrets_usage.py::test_create_eds_with_old_secret_with_success [GOOD] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |57.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::EmptyVariants[2,true,1,1000,1000000,0] [GOOD] Test command err: Trying to start YDB, gRPC: 63625, MsgBus: 11040 2025-10-24T16:25:53.920248Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825342666456825:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:25:53.920267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f53/r3tmp/tmpzsUOVb/pdisk_1.dat 2025-10-24T16:25:53.976146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:25:53.984994Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:25:53.985209Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825342666456799:2081] 1761323153920117 != 1761323153920120 TServer::EnableGrpc on GrpcPort 63625, node 1 2025-10-24T16:25:53.995712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:25:53.995736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:25:53.995738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:25:53.995788Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11040 2025-10-24T16:25:54.024070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:25:54.024096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:25:54.025083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11040 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:25:54.045463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:25:54.224496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:25:54.322608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825346961424776:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.322635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.322741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825346961424786:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.322753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:25:54.365555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:25:54.381998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:25:54.382082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:25:54.382082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:25:54.382097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:25:54.382162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:25:54.382175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:25:54.382191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:25:54.382204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:25:54.382242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:25:54.382248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:25:54.382295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:25:54.382298Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:25:54.382334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:25:54.382334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:25:54.382366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:25:54.382369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:25:54.382394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:25:54.382397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:25:54.382430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:25:54.382440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:25:54.382461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825346961424859:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:25:54.382462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825346961424842:2323];tablet_id=72075186224037888;process=TT ... /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:05.432953Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:05.433060Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825393309224929:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:05.433070Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825393309224930:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:05.433089Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:05.434217Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:05.442360Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825393309224933:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:26:05.501848Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825393309224984:2495] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:26:05.521576Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:26:05.522001Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES (11u), (12u), (13u), (14u) 2025-10-24T16:26:05.549680Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:26:05.549680Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:26:05.922813Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 2025-10-24T16:26:06.384250Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:06.384266Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:06.384274Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:06.384455Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323165434;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:06.384472Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323165448;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:06.384477Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323165462;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:26:06.384634Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825393309224713:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323165476;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761323165476 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:06.385199Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:06.385215Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:06.385220Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:06.385363Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323165434;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:06.385383Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323165448;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:06.385388Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323165462;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:26:06.385452Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825393309224710:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323165476;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761323165476 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] OUTPUT: [[1u;#];[2u;#];[3u;#];[4u;#];[11u;#];[12u;#];[13u;#];[14u;#]] INDEX:0/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[[""]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[["{\"others\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"columns\":{\"accessor\":[],\"size\":[],\"key_names\":[],\"records\":[]},\"o_size\":0,\"h_size\":10,\"c_size\":0}"]];[[""]];[[""]];[[""]];[[""]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,1000000,0.5] |57.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test.py::test[pg-tpcds-q19-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |58.0%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |58.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,1000000,0.5] [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> test.py::test[blocks-block_input_sys_columns--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types-v3-Results] [SKIPPED] >> test.py::test[blocks-combine_all_avg--Results] |58.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |58.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleExistsVariants[10,true,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 5124, MsgBus: 64033 2025-10-24T16:26:02.497358Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825379444438651:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:02.497592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001267/r3tmp/tmp6lW7R7/pdisk_1.dat 2025-10-24T16:26:02.532304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:02.543043Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825379444438623:2081] 1761323162497145 != 1761323162497148 2025-10-24T16:26:02.545988Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5124, node 1 2025-10-24T16:26:02.550413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:02.550425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:02.550427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:02.550469Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is 
connected to server localhost:64033 TClient is connected to server localhost:64033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:02.600866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:02.600904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:02.601869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:26:02.608266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:02.686024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:02.885011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825379444439303:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:02.885039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:02.885140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825379444439313:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:02.885148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:02.932283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:02.951942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:02.952016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:02.952071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:02.952105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:02.952107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:02.952124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:02.952154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:02.952162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:02.952201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:02.952212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:02.952226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2025-10-24T16:26:02.952236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:02.952252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:02.952262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:02.952269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:02.952287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:02.952291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:02.952303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:02.952313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:02.952319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825379444439373:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:02.952335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:02.952360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825379444439374:2324];tablet_id=72075186224037889;process=TT ... 
: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.409772Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.409949Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.410106Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.410463Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.410632Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.410805Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.410982Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.411147Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:08.411300Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" 
SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:08.421477Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825406034149272:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:08.421509Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:08.421579Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825406034149277:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:08.421589Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825406034149278:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:08.421609Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:08.422431Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:08.426983Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825406034149281:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:08.478106Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825406034149332:2690] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_EXISTS(Col2, "$.a") ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |58.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::BrokenJsonWriting[2,true,1024,1000,100,0] >> KqpOlapJson::CompactionVariants[2,false,1024,0,1000000,0] |58.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut >> KqpOlapJson::DoubleFilterVariants[10,true,1024,0,0,0.5] |58.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |58.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TGroupMapperTest::MonteCarlo [GOOD] |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,1024,1000,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,1024,1000,1000000,0] |58.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[tpch-q2-default.txt-Results] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterVariants[10,true,1024,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,0,0] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::BrokenJsonWriting[2,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[2,true,1024,1000,1000000,0.5] |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpOlapJson::CompactionVariants[2,false,1024,0,1000000,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,0,1000000,0.5] |58.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.1%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |58.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> 
Cdc::AlterViaTopicService |58.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.2%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> KqpOlap::PushdownFilter >> KqpOlapJson::BrokenJsonWriting[2,true,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,0,0,0,0] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,0,0.5] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::Alter [GOOD] >> Cdc::DescribeStream |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,100,0] |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] >> KqpOlapJson::CompactionVariants[2,false,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,0,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.2%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test |58.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::CompactionVariants[1,false,0,0,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,0,0,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0032c1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit_log.e5qum3m2.txt 2025-10-24T16:26:06.877028Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:26:06.877015Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-10-24T16:26:06.853597Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/py3test >> test_secrets_usage.py::test_create_eds_with_old_secret_with_success [GOOD] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlap::PushdownFilter [GOOD] >> KqpOlap::PredicatePushdown_SimpleAsciiILike >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,100,0.5] >> Cdc::DecimalKey [GOOD] >> Cdc::AddColumn |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,0,0,0.5] >> test_secrets_usage.py::test_success_create_eds_with_many_secrets_with_fail |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::CompactionVariants[2,false,1024,10,0,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0032bb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit_log.gd5uuc8p.txt 2025-10-24T16:26:08.252285Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,100,0] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::CompactionVariants[1,false,0,0,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,0,0,100,0] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,1000000,0] |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,false,0,0,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,0,0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,100,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] |58.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |58.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning |58.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/federated_query/ut_service/ydb-core-kqp-federated_query-ut_service |58.4%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/ydb-core-kqp-federated_query-ut_service |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/ydb-core-kqp-federated_query-ut_service |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,0,0.5] >> KqpOlapJson::CompactionVariants[2,false,1024,10,0,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,100,0] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,1000000,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,1000000,0] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> KqpOlapJson::CompactionVariants[1,false,0,0,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[1,false,0,0,100,0.5] |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,100,0] >> test.py::test[blocks-combine_all_avg--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0032b9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit_log.ha5a65rc.txt 2025-10-24T16:26:09.822403Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:26:09.822387Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 
3)","start_time":"2025-10-24T16:26:09.792052Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,1000000,0.5] >> KqpOlap::PredicatePushdown_SimpleAsciiILike [GOOD] >> KqpOlap::PushdownFilterMultiConsumersRead >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,1000000,0.5] [GOOD] |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0032b5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit_log.3kj0z62q.txt 2025-10-24T16:26:10.757378Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:26:10.757357Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-10-24T16:26:10.716134Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |58.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,100,0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,100,0.5] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,0,0] |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |58.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> KqpOlapJson::CompactionVariants[2,false,1024,10,100,0] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,100,0.5] >> 
KqpOlapJson::SimpleVariants[10,false,1,10,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[10,true,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 9447, MsgBus: 15974 2025-10-24T16:26:09.795549Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825408824781319:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:09.795568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:26:09.804465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001266/r3tmp/tmpB2O5j4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9447, node 1 2025-10-24T16:26:09.840448Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:09.845293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:09.845304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:09.845305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:09.845352Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15974 TClient is connected to server localhost:15974 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:26:09.904081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:09.904122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:09.907727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:26:09.937395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:26:09.956983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:26:09.963332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:26:10.324586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825413119749266:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.324617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.324764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825413119749276:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.324768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.393331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:10.423761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:10.423931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:10.423987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:10.424015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:10.424037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:10.424064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:10.424086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:10.424108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:10.424128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:10.424159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:10.424187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:10.424207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:10.424245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825413119749420:2331];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:10.425440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:10.425456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:10.425498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:10.425517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:10.425535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:10.425558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:10.425579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:10.425598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:10.425614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825413119749406:2325];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstrac ... 
LUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.076450Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.076608Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.076767Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.077144Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 
2025-10-24T16:26:19.077304Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.077439Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.077521Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:19.077563Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:19.082641Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [7:7564825454217885633:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.082663Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.082682Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825454217885638:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.082703Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825454217885640:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.082712Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.083418Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:19.085244Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825454217885642:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:19.156925Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825454217885693:2689] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:8/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 2025-10-24T16:26:19.373682Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0032ac/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit_log.mlzk59d0.txt 2025-10-24T16:26:13.264389Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:26:13.264373Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-10-24T16:26:13.245822Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |58.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpOlapJson::CompactionVariants[1,false,0,0,100,0.5] [GOOD] |58.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/pytest >> test.py::test[blocks-combine_all_avg--Results] [GOOD] |58.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] 
[GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[1,false,0,0,100,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 13589, MsgBus: 16060 2025-10-24T16:26:09.782038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:26:09.782242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825412688067879:2164];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:09.782344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b42/r3tmp/tmpNv0F32/pdisk_1.dat 2025-10-24T16:26:09.816267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:09.816293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:09.818046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13589, node 1 2025-10-24T16:26:09.843412Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:09.852005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825412688067729:2081] 1761323169774638 != 1761323169774641 2025-10-24T16:26:09.852237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:09.852239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:09.852241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:09.852290Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16060 
2025-10-24T16:26:09.901372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:09.928959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:26:10.328150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825416983035699:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.328175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.328346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825416983035709:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.328354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.361927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:10.373212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:10.373293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:10.373356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:10.373376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:10.373393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:10.373414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:10.373440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:10.373461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:10.373484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:10.373503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:10.373518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:10.373531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:10.373548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825416983035770:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:10.374595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:10.374611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:10.374628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:10.374634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:10.374656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:10.374661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:10.374682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:10.374686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:10.374694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:10.374699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;e ... 
ent=0;tx_id=281474976710660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:19.371191Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:19.371952Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825453897915101:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.372107Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.372333Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825453897915113:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.372342Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.380033Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:26:19.395943Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825453897915134:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.395998Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.396368Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825453897915139:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.396380Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825453897915140:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.396459Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:19.397470Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:19.400912Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-10-24T16:26:19.401006Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825453897915143:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:26:19.497041Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825453897915194:2463] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:26:19.531822Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:26:19.604378Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=16;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:26:19.668385Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=22;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:26:19.888671Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:26:20.325739Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:20.325756Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:20.325763Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:20.325998Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323179371;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:20.326021Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323179392;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:20.326030Z node 7 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323179406;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:26:20.326147Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825453897914964:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323179427;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761323179427 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] |58.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> KqpOlapJson::SimpleVariants[10,false,1,10,0,0] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,10,100,0] |58.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |58.5%| [LD] {RESULT} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-10-24T16:24:01.829412Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1761323041829401 2025-10-24T16:24:01.954349Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824861017883251:2162];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:01.954436Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:01.958288Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824859127386954:2079];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:24:01.958288Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:24:01.958474Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:24:01.960301Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00150a/r3tmp/tmp6iXkDF/pdisk_1.dat 2025-10-24T16:24:02.029066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:02.043253Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:24:02.178206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:02.178230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:02.181490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:24:02.181503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:24:02.183032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:02.185549Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:24:02.186915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:24:02.266588Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:24:02.273052Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 27656, node 1 2025-10-24T16:24:02.330925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:24:02.527816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00150a/r3tmp/yandexFGVFaJ.tmp 2025-10-24T16:24:02.527829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/fxx3/00150a/r3tmp/yandexFGVFaJ.tmp 2025-10-24T16:24:02.528169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00150a/r3tmp/yandexFGVFaJ.tmp 2025-10-24T16:24:02.528218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:24:02.531141Z INFO: TTestServer started on Port 61595 GrpcPort 27656 TClient is connected to server localhost:61595 PQClient connected to localhost:27656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:24:02.964122Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:24:02.965863Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:24:03.034254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:24:05.328677Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824876307256460:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.328715Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824876307256435:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.328727Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.331255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824876307256465:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.331279Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:24:05.330835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:24:05.355432Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824876307256464:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:24:05.405958Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7564824878197753392:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:05.406057Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=NDQ3MmM4YWYtZDllZDI1Yi1jZDZhYzA4Yi04MjdjMWZiZg==, ActorId: [1:7564824878197753321:2324], ActorState: ExecuteState, TraceId: 01k8bghmerf3j50kxw699w9w4v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:05.406165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:24:05.406545Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:24:05.454206Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824876307256511:2151] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:24:05.470208Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824876307256518:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:24:05.470796Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=MzI1NzQ2ZTMtZTYwNTZiYTMtNWY5ZjE0ZWYtYWE3YTNjOQ==, ActorId: [2:7564824876307256433:2298], ActorState: ExecuteState, TraceId: 01k8bghmed4e5jmgjmk9979q4w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:24:05.470966Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues ... e 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.648232Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2025-10-24T16:26:20.679555Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent 2025-10-24T16:26:20.679869Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session: try to update token 2025-10-24T16:26:20.679885Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-10-24T16:26:20.680779Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:26:20.680806Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:26:20.680847Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-10-24T16:26:20.681245Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-10-24T16:26:20.681259Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-10-24T16:26:20.681283Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2011: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-10-24T16:26:20.681293Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:637: [72075186224037892][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-10-24T16:26:20.681314Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:20.681325Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.681329Z node 4 :PERSQUEUE DEBUG: partition.cpp:2354: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-10-24T16:26:20.681367Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-10-24T16:26:20.681428Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob 
complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-10-24T16:26:20.681437Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:26:20.681491Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1675: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 2 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000? size 160 WTime 1761323180680 2025-10-24T16:26:20.681521Z node 4 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-10-24T16:26:20.681534Z node 4 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-10-24T16:26:20.680415Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|3aca36ec-15a1353e-884c1793-6a731505_0 grpc read done: success: 1 data: write_request[data omitted] 2025-10-24T16:26:20.680526Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-10-24T16:26:20.681072Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-10-24T16:26:20.684659Z node 4 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7564824942042683100:2391] 2025-10-24T16:26:20.684690Z node 4 :PERSQUEUE DEBUG: partition.cpp:2168: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-10-24T16:26:20.684705Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1186: [PQ: 72075186224037892] Topic 'rt3.dc1--test-topic' counters. CacheSize 480 CachedBlobs 3 2025-10-24T16:26:20.684785Z node 4 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 160 2025-10-24T16:26:20.688107Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:490: [72075186224037892][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-10-24T16:26:20.688151Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-10-24T16:26:20.688166Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-10-24T16:26:20.688180Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-10-24T16:26:20.688224Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:20.688228Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.688231Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:26:20.688246Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:26:20.688267Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-10-24T16:26:20.697178Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-10-24T16:26:20.691927Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-10-24T16:26:20.697284Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 7000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-10-24T16:26:20.697293Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-10-24T16:26:20.697298Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-10-24T16:26:20.699969Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|3aca36ec-15a1353e-884c1793-6a731505_0 grpc read done: success: 0 data: 2025-10-24T16:26:20.699989Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: src_id|3aca36ec-15a1353e-884c1793-6a731505_0 grpc read failed 2025-10-24T16:26:20.700001Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: src_id|3aca36ec-15a1353e-884c1793-6a731505_0 grpc closed 2025-10-24T16:26:20.700009Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: src_id|3aca36ec-15a1353e-884c1793-6a731505_0 is DEAD 2025-10-24T16:26:20.700201Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:26:20.703447Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-10-24T16:26:20.703483Z :ERROR: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-10-24T16:26:20.703487Z :ERROR: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-10-24T16:26:20.703491Z :INFO: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session will now close 2025-10-24T16:26:20.703512Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session: aborting 2025-10-24T16:26:20.700573Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [3:7564825373425353630:3345] destroyed 2025-10-24T16:26:20.700600Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-10-24T16:26:20.700610Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:20.700614Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.700616Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:26:20.709238Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3aca36ec-15a1353e-884c1793-6a731505_0] MessageGroupId [src_id] Write session: destroy 2025-10-24T16:26:20.748536Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:20.748553Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.748557Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:26:20.851110Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:20.851141Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.851145Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:26:20.952724Z node 4 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:20.952744Z node 4 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:20.952747Z node 4 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,100,0.5] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> KqpOlap::PushdownFilterMultiConsumersRead [GOOD] >> KqpOlap::ProjectionPushDown >> KqpOlapJson::SimpleVariants[10,false,1,10,100,0] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,10,100,0.5] >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,1000000,0.5] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,100,0.5] [GOOD] >> KqpOlapJson::CompactionVariants[2,false,1024,10,1000000,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |58.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] |58.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,100,0.5] [GOOD] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SimpleVariants[10,false,1,10,100,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,1000000,0.5] [GOOD] >> LabeledDbCounters::OneTabletRestart [GOOD] >> LabeledDbCounters::TwoTablets |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> Cdc::AddStream [GOOD] >> Cdc::DisableStream >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,1024,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 5805, MsgBus: 16295 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001264/r3tmp/tmpp0TkRt/pdisk_1.dat 2025-10-24T16:26:16.207903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:16.207977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 5805, node 1 2025-10-24T16:26:16.231823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:16.231846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:16.231865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:16.231924Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:26:16.232188Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825438806667797:2081] 1761323176136966 != 1761323176136969 2025-10-24T16:26:16.235700Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:16295 2025-10-24T16:26:16.258853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:16.258888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:16.259936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-10-24T16:26:16.310649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:26:16.316222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:26:16.502385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:16.611278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825438806668479:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.611329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.611445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825438806668489:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.611456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.660256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:16.685944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:16.686019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:16.686119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:16.686141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:16.686160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:16.686177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:16.686195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:16.686212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:16.686233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:16.686251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:16.686273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:16.686293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:16.686310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825438806668541:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:16.686997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:16.687008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:16.687022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:16.687026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:16.687046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:16.687051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:16.687065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:16.687069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:16.687078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:16.687082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:16.687088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::E ... 
5Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:23.632343Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:26:23.632349Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:26:23.632358Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:26:23.632365Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:26:23.632381Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:26:23.632388Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:26:23.632401Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:26:23.632405Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:26:23.641280Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825469049763299:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=88077854662128;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323183641;max=18446744073709551615;plan=0;src=[7:7564825464754795632:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:23.646602Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:23.646626Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:23.646630Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:23.656075Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763372:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.656097Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.656243Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763375:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.656250Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.657907Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:23.666629Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:23.700595Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:23.703108Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763404:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.703295Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.708158Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763416:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.708180Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.710396Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:26:23.737198Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763437:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.737222Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.737383Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763442:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.737392Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825469049763443:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.737396Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:23.738462Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:23.741682Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825469049763446:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:23.816787Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825469049763497:2440] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:26:23.856550Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[10,true,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 2480, MsgBus: 18722 2025-10-24T16:26:15.746925Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825438245012063:2149];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:15.746939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f51/r3tmp/tmpYU2GXp/pdisk_1.dat 2025-10-24T16:26:15.787494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 2480, node 1 2025-10-24T16:26:15.805094Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:15.815184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:15.815195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:15.815196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:15.815244Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:26:15.851918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:15.851953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:15.853148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:18722 TClient is connected to server localhost:18722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:15.948990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:15.951723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:15.996635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:16.240102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825442539979910:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.240135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.240342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825442539979919:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.240353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:16.278459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:16.298386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:16.298427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:16.298446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:16.298468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:16.298517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:16.298521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:16.298543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:16.298546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:16.298569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:16.298570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:16.298593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:16.298599Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:16.298939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:16.298945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:16.298976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:16.298990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:16.298998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:16.299020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:16.299023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:16.299054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:16.299056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825442539979981:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:16.299074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825442539979980:2323];tablet_id=72075186224037888;process=TTxInit ... 
l: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.082142Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.082295Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.082442Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.082575Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } 
Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.082716Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.082852Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.083006Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:24.083149Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : 
"value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:26:24.092748Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825474876847607:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.092769Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.092865Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825474876847612:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.092871Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825474876847613:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.092916Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.093588Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:24.100002Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-10-24T16:26:24.100076Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825474876847616:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:24.168926Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825474876847667:2688] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; 2025-10-24T16:26:24.353094Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:6/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleVariants[10,false,1,10,1000000,0] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,10,1000000,0.5] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003296/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit_log.b63r40ie.txt >> KqpOlapJson::CompactionVariants[2,false,1024,10,1000000,0.5] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SimpleVariants[10,false,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[10,false,1,1000,0,0.5] >> Cdc::DisableStream [GOOD] >> Cdc::AwsRegion ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::CompactionVariants[2,false,1024,10,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12657, MsgBus: 9466 2025-10-24T16:26:09.775393Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825408800241498:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:09.775420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f52/r3tmp/tmpx0s5tl/pdisk_1.dat 2025-10-24T16:26:09.839990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:09.847499Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12657, node 1 2025-10-24T16:26:09.863618Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:09.863631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:09.863633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:09.863678Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9466 2025-10-24T16:26:09.877801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:09.877832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:09.879187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:26:09.923565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:09.995560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:10.172478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825413095209419:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.172514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.172568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825413095209429:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.172575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:10.221282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:10.238020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:10.238094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:10.238157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:10.238189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:10.238209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:10.238229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:10.238260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:10.238282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:10.238304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:10.238339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:10.238361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:10.238387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:10.238412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825413095209494:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:10.245099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:10.245156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:10.245200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:10.245226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:10.245255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:10.245279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:10.245303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:10.245327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:10.245345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825413095209490:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:10.245365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=720751862240378 ... oad_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.541259Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825474656440856:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.541288Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825474656440857:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.541321Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:24.542511Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:24.547544Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-10-24T16:26:24.547640Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825474656440860:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:26:24.617099Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825474656440911:2499] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:26:24.654636Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:26:24.654711Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(11u, JsonDocument('{"a" : "1a1"}')), (12u, JsonDocument('{"a" : "1a2"}')), (13u, JsonDocument('{"b" : "1b3"}')), (14u, JsonDocument('{"b" : "1b4", "a" : "a4"}')) 2025-10-24T16:26:24.712394Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:26:24.712483Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(10u) 2025-10-24T16:26:24.775144Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=26;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:26:25.457808Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:25.457827Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:25.457832Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:25.458007Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323184516;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:26:25.458024Z 
node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323184544;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:26:25.458029Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323184558;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:26:25.458113Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825474656440628:2322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323184579;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761323184579 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:25.466699Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:25.466714Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:25.466719Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:25.466898Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323184516;tx_id=281474976715658;;is_diff=0;version=1; 2025-10-24T16:26:25.466914Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323184544;tx_id=281474976715659;;is_diff=1;version=2; 2025-10-24T16:26:25.466918Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323184558;tx_id=281474976715660;;is_diff=1;version=3; 2025-10-24T16:26:25.467015Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825474656440651:2327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323184579;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 1761323184579 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> KqpOlap::ProjectionPushDown [GOOD] >> KqpOlap::PredicateWithLimit >> 
test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SimpleVariants[10,false,1,1000,0,0.5] [GOOD] |58.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::WaitQueue/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |58.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |58.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results >> Cdc::AwsRegion [GOOD] |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> YdbSdkSessionsPool::StressTestSync/1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleVariants[10,false,1,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24718, MsgBus: 22012 2025-10-24T16:26:20.782229Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825459965623534:2084];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:20.788138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:26:20.792378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001263/r3tmp/tmpA6i6GM/pdisk_1.dat 2025-10-24T16:26:20.844154Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24718, node 1 2025-10-24T16:26:20.884213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:20.884223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:20.884225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:20.884265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:26:20.888877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:20.889000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:20.889910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:22012 2025-10-24T16:26:20.923879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:20.988881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:20.995981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:26:21.268419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825464260591451:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:21.268443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:21.268611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825464260591461:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:21.268619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:21.317045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:21.340589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:21.340645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:21.340697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:21.340717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:21.340744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:21.340768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:21.340793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:21.340814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:21.340836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:21.340861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:21.340884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:21.340909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:21.340930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825464260591604:2331];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:21.347447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:21.347478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:21.347524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:21.347546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:21.347569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:21.347592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:21.347616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:21.347640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:21.347662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825464260591581:2326];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstr ... 
5-10-24T16:26:27.908992Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.909201Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.909385Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.909571Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.909700Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } 
DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.910099Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.910296Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.910300Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:27.910457Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 
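For manual reproduction of this KqpOlapJson::SimpleVariants run, the YQL statements recorded by the EXECUTE entries of the test (the CREATE TABLE shown earlier in this block, plus the REPLACE INTO and SELECT that follow) can be assembled into a single script. This is only a convenience sketch taken verbatim from the log; any harness-side session settings beyond what is printed here are assumed to be defaults.

    -- Column-store table as created by the test (copied from the EXECUTE entry above).
    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        PRIMARY KEY (Col1)
    )
    PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10);

    -- Test data, copied from the REPLACE INTO entry below.
    REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
        (1u, JsonDocument('{"a" : "a1"}')),
        (2u, JsonDocument('{"a" : "a2"}')),
        (3u, JsonDocument('{"b" : "b3"}')),
        (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}'));

    -- Read-back query, copied from the final EXECUTE entry of the test.
    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT * FROM `/Root/ColumnTable` ORDER BY Col1;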
EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:27.918830Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825488054552868:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:27.918855Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:27.919002Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825488054552874:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:27.919010Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825488054552873:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:27.919027Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:27.920005Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:27.923350Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825488054552877:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:27.936518Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:26:27.977725Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825488054552929:2690] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> YdbSdkSessionsPool1Session::GetSession/0 >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::WaitQueue/1 |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: 2025-10-24T16:23:58.793939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:58.902709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:23:58.903050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:23:58.903060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000cea/r3tmp/tmpm6zjE4/pdisk_1.dat 2025-10-24T16:23:59.508241Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:23:59.510597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:59.510623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:59.510718Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323035585678 != 1761323035585681 2025-10-24T16:23:59.548403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:59.718099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:23:59.779867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:59.881789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:23:59.953489Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:23:59.954260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:24:00.052281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:24:00.052324Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:24:00.052603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:24:00.052612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:24:00.052619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:24:00.052675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:24:00.052694Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-10-24T16:24:00.052706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:24:00.063227Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:24:00.067412Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:24:00.067519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:24:00.067561Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:24:00.067566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:00.067570Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:24:00.067574Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:00.067752Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:24:00.067775Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:24:00.067791Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:00.067798Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:00.067807Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:24:00.067811Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:00.067820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:24:00.067972Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:24:00.068044Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:24:00.068064Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:24:00.068529Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:00.078919Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:24:00.078970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-10-24T16:24:00.223972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:707:2585], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-10-24T16:24:00.225185Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-10-24T16:24:00.225215Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:00.225308Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:00.225320Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:24:00.225335Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-10-24T16:24:00.225428Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-10-24T16:24:00.225470Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:24:00.225877Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:24:00.225900Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-10-24T16:24:00.226363Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-10-24T16:24:00.226477Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:24:00.226941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-10-24T16:24:00.226953Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:00.227008Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-10-24T16:24:00.227020Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:00.227364Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:24:00.227376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:24:00.227383Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-10-24T16:24:00.227402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:24:00.227412Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-10-24T16:24:00.227425Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:24:00.227744Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:692:2576][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-10-24T16:24:00.229183Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:24:00.229762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-10-24T16:24:00.229893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-10-24T16:24:00.229903Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got T ... ate before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.885784Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.885788Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.906284Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.906335Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.906340Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.906379Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.906383Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.906388Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.926851Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.926886Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.926890Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.926918Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.926921Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.926925Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.947335Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.947371Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after 
ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.947376Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.947537Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.947545Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.947550Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.968734Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.968775Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.968782Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.968819Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.968825Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.968831Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.999449Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.999493Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.999498Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:28.999524Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:28.999528Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:28.999532Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:29.019979Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:29.020025Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:29.020034Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:29.020062Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:29.020067Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:29.020071Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:29.040516Z node 24 
:PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:29.040554Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:29.040560Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:26:29.040588Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:29.040593Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:29.040598Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:29.061030Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:29.061058Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:29.061063Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:26:29.061103Z node 24 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:26:29.061107Z node 24 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:26:29.061110Z node 24 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-10-24T16:26:29.087783Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:26:29.087807Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-10-24T16:26:29.087890Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:883: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-10-24T16:26:29.087915Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1097: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 427 count 1 last offset 0, current partition end offset: 1 2025-10-24T16:26:29.087923Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1123: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2025-10-24T16:26:29.087939Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 427 accessed 1 times before, last time 1970-01-01T00:00:02.000000Z 2025-10-24T16:26:29.087948Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 
2025-10-24T16:26:29.087969Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:581: FormAnswer for 1 blobs 2025-10-24T16:26:29.087998Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 0 totakecount 1 count 1 size 407 from pos 0 cbcount 1 2025-10-24T16:26:29.088116Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:26:29.088136Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-10-24T16:26:29.088319Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-10-24T16:26:29.088330Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2636: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-10-24T16:26:29.088445Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:883: [72075186224037891][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-10-24T16:26:29.088462Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1097: [72075186224037891][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 426 count 1 last offset 0, current partition end offset: 1 2025-10-24T16:26:29.088467Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1123: [72075186224037891][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2025-10-24T16:26:29.088480Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 426 accessed 0 times before, last time 1970-01-01T00:00:02.000000Z 2025-10-24T16:26:29.088486Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037891][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2025-10-24T16:26:29.088497Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:581: FormAnswer for 1 blobs 2025-10-24T16:26:29.088508Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 0 totakecount 1 count 1 size 406 from pos 0 cbcount 1 2025-10-24T16:26:29.088555Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-10-24T16:26:29.088567Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> YdbSdkSessionsPool::PeriodicTask/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00327d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit_log.k0cyh9yn.txt 2025-10-24T16:26:23.298122Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/py3test >> test_multinode_cluster.py::TestSqsMultinodeCluster::test_reassign_master[stop_node] [GOOD] |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> YdbSdkSessionsPool::StressTestAsync/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> YdbSdkSessionsPool1Session::CustomPlan/0 |58.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlap::PredicateWithLimit [GOOD] >> KqpOlap::PredicateWithLimitLostRecords >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_secrets_usage.py::test_success_create_eds_with_many_secrets_with_fail [GOOD] |58.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/kqprun/kqprun |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| 
[TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> KqpOlap::PredicateWithLimitLostRecords [GOOD] >> KqpOlap::PredicateWithTimestampParameter |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool1Session::FailTest/0 >> YdbSdkSessionsPool::StressTestSync/0 >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |58.8%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig |58.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.9%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlap::PredicateWithTimestampParameter [GOOD] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
KqpScheme::BuildingUniqIndexDeniesTableModificationsPublicApi |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicateWithTimestampParameter [GOOD] Test command err: Trying to start YDB, gRPC: 27548, MsgBus: 15681 2025-10-24T16:26:12.146415Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825422187467548:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:12.146442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:26:12.161088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001265/r3tmp/tmpPP99dO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27548, node 1 2025-10-24T16:26:12.236031Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:12.240158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:12.240171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:12.240174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:12.240215Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:26:12.270255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:12.270289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:12.276338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15681 2025-10-24T16:26:12.336574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15681 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:12.390891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:12.394041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:26:12.433839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-10-24T16:26:12.443175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];fline=columnshard.cpp:106;event=initialize_shard;step=OnActivateExecutor; 2025-10-24T16:26:12.449053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];fline=columnshard.cpp:125;event=initialize_shard;step=initialize_tiring_finished; 2025-10-24T16:26:12.449111Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037891 2025-10-24T16:26:12.449909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:12.450000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:12.450076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:12.450106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:12.450133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:12.450163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:12.450196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:12.450257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:12.450285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:12.450316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:12.450343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:12.450374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:12.450401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825422187468207:2304];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:12.451344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];fline=columnshard.cpp:106;event=initialize_shard;step=OnActivateExecutor; 2025-10-24T16:26:12.460131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];fline=columnshard.cpp:125;event=initialize_shard;step=initialize_tiring_finished; 2025-10-24T16:26:12.460187Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037889 2025-10-24T16:26:12.461011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:12.461032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:12.461108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:12.461132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:12.461156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:12.461181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:12.461211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:12.461239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:12.461260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:12.461282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825422187468206:2303];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=C ... 
e=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.313017Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.313750Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.313765Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.313768Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.313984Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.313999Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314001Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314836Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314852Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314854Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314959Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314974Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.314976Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.316075Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.316081Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.316091Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.316091Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.316093Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.316093Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.317188Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.317204Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.317207Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.317252Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.317270Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.317272Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.318149Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.318166Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.318168Z node 9 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.318352Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.318367Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.318370Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.319244Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.319262Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.319264Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.319481Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.319496Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.319499Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.320415Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.320427Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.320428Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.320688Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.320705Z node 9 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.320708Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.321161Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.321179Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.321182Z node 9 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-10-24T16:26:33.393065Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterVariants[2,false,1024,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,10,1000000,0] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,0,0] |58.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,0,0] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,0,0] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,0,0] |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterVariants[2,false,1024,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,10,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,10,1000000,0.5] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> KqpOlapJson::QuotedFilterVariants[1,false,1024,0,0,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00327e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit_log.o42t4t32.txt 2025-10-24T16:26:24.409015Z: {"commit_tx":"1","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","cloud_id":"cloud-id-A","begin_tx":"1","resource_id":"database-id-C","end_time":"2025-10-24T16:26:24.408997Z","tx_id":"{none}","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-10-24T16:26:24.337004Z","database":"/Root/test_auditlog.py","subject":"root@builtin","status":"SUCCESS","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","component":"grpc-proxy","sanitized_token":"**** (B6C6F477)","detailed_status":"SUCCESS","remote_address":"127.0.0.1"} >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,0,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,0,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,0,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> KqpOlapJson::SimpleVariants[2,true,0,0,0,0] >> KqpOlap::YqlScriptOltpAndOlap >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,100,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,0,0] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,0,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,100,0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,100,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> KqpOlapJson::QuotedFilterVariants[1,false,1024,0,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1024,0,0,0.5] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,0,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,100,0] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,100,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,0,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> KqpOlapJson::SimpleVariants[2,true,0,0,0,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,true,0,0,100,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,0,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,100,0] >> KqpOlap::YqlScriptOltpAndOlap [GOOD] >> KqpOlap::WithDefaultValue >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,100,0.5] |59.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |59.0%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |59.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,0,0.5] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,0,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,100,0] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,100,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,1000000,0] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,0,0.5] >> KqpOlap::WithDefaultValue [GOOD] >> KqpOlapAggregations::Aggregation >> KqpOlapJson::QuotedFilterVariants[1,false,1024,0,0,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,0,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,1000000,0] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,100,0.5] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,100,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,1000000,0] >> KqpOlapJson::SimpleVariants[2,true,0,0,100,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,true,0,0,100,0.5] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,100,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,100,0.5] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> 
KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,1000000,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,100,0] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,100,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,100,0.5] |59.0%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,1000000,0.5] |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/py3test >> test_secrets_usage.py::test_success_create_eds_with_many_secrets_with_fail [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,1000000,0] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,0,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,100,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,0,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,100,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,1000000,0.5] >> KqpOlapJson::SimpleVariants[2,true,0,0,100,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[2,true,0,0,1000000,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,100,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,0,0] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,100,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,100,0.5] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,100,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> YdbSdkSessionsPool::StressTestSync/1 [FAIL] >> KqpOlapJson::BrokenJsonWriting[1,false,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::BrokenJsonWriting[1,false,0,1000,0,0.5] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,1000000,0.5] >> 
KqpOlapJson::RestoreFullJsonVariants[1,true,1024,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,1000,0,0.5] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,100,0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,100,0.5] >> KqpOlapJson::SimpleVariants[2,true,0,0,1000000,0] [GOOD] >> KqpOlapJson::SimpleVariants[2,true,0,0,1000000,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,1000000,0] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,0,0.5] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,1000000,0] >> KqpOlapJson::BrokenJsonWriting[1,false,0,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,1000000,0.5] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,1000000,0.5] >> KqpScheme::BuildingUniqIndexDeniesTableModificationsPublicApi [GOOD] >> KqpScheme::AlterUser |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,10,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,0,0] >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,1000,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,100,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,100,0.5] |59.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::BrokenJsonWriting[1,false,0,1000,0,0.5] [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; Trying to start YDB, gRPC: 18514, MsgBus: 24564 2025-10-24T16:26:35.273151Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825521608760651:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:35.273182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f4f/r3tmp/tmpfTQFt1/pdisk_1.dat 2025-10-24T16:26:35.326624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:35.327233Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:35.327434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825521608760627:2081] 1761323195273026 != 1761323195273029 TServer::EnableGrpc on GrpcPort 18514, node 1 2025-10-24T16:26:35.336794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:35.336804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:35.336806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:35.336844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24564 TClient is connected to server localhost:24564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:26:35.377094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:35.377129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:35.378299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:35.381258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:26:35.581120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:35.663811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825521608761303:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.663842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.663947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825521608761313:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.663960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.710202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:35.721104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:35.721166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:35.721239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:35.721271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:35.721302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:35.721327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:35.721358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:35.721387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:35.721408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:35.721436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:35.721466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:35.721488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:35.721516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521608761366:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:35.722132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:35.722145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:35.722160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:35.722170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:35.722195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:35.722199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:35.722212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:35.722217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:35.722224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:35.722229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fl ... 
description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:40.267020Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:26:40.267026Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:26:40.267050Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:26:40.267074Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:26:40.267086Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:26:40.267092Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:40.267098Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:26:40.267104Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:26:40.267110Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:26:40.267115Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:26:40.267129Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:26:40.267133Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:26:40.267144Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:26:40.267148Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:26:40.268613Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[7:7564825545583053684:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=126715415446528;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323200268;max=18446744073709551615;plan=0;src=[7:7564825541288086040:2157];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:40.272975Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:40.272996Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:40.272999Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:40.289293Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825545583053756:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.289318Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.289358Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825545583053758:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.289378Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.292207Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:40.298982Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:40.316164Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825545583053788:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.316192Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.316886Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:40.317429Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825545583053791:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.317548Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.325742Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=2; 2025-10-24T16:26:40.334069Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564825545583053684:2323];path_id=1000000889;fline=abstract_scheme.cpp:344;event=cannot build accessor;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:26:40.334091Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564825545583053684:2323];path_id=1000000889;fline=pack_builder.cpp:106;event=cannot prepare for write;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:26:40.334095Z node 7 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=72075186224037888;parent_id=[7:7564825545583053684:2323];path_id=1000000889;fline=pack_builder.cpp:218;event=cannot build slice;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:26:40.334127Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825545583053684:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;fline=columnshard__write.cpp:109;writing_size=240;event=data_write_error;writing_id=38690e20-b0f611f0-a0678cee-f733305;reason=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.; 2025-10-24T16:26:40.334189Z node 7 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825545583053684:2323];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=72075186224037888;event=TEvWritePortionResult;tablet_id=72075186224037888;local_tx_no=8;method=execute;tx_info=blobs_writing_failed;fline=events.h:105;event=ev_write_error;status=STATUS_BAD_REQUEST;details=json parsing error: UNCLOSED_STRING: A string is opened, but never closed.;tx_id=140737488355335; Cannot write data into shard(Incorrect request: json parsing error: UNCLOSED_STRING: A string is opened, but never closed.) 
72075186224037888 in longTx ydb://long-tx/01k8bgpbtaee4f0yzpn08f76nd?node_id=7 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> KqpOlapJson::SimpleVariants[2,true,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::SimpleVariants[2,true,0,10,0,0.5] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,1000000,0] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,1000000,0] >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,100,0] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,10,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,1000,0,0.5] >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,1000000,0.5] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[1,true,1024,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 3989, MsgBus: 16621 2025-10-24T16:26:35.251829Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825520840226762:2156];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:35.251869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001261/r3tmp/tmpvcRfCU/pdisk_1.dat 2025-10-24T16:26:35.304384Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:35.304548Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825520840226634:2081] 1761323195250917 != 1761323195250920 2025-10-24T16:26:35.304579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3989, node 1 2025-10-24T16:26:35.312782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:35.312793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:35.312794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:35.312833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16621 2025-10-24T16:26:35.354484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:35.354514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:35.355511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:16621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:35.370378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:26:35.530412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:35.636756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825520840227314:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.636784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.636969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825520840227324:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.636980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.677626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:35.687610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:35.687657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:35.687702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:35.687735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:35.687759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:35.687782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:35.687814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:35.687838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:35.687882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:35.687907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:35.687932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:35.687959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:35.687990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825520840227377:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:35.688559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:35.688573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:35.688588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:35.688593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:35.688613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:35.688621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:35.688633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:35.688642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:35.688650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:35.688658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:35.688664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=T ... 
pshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:40.479223Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:26:40.479230Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:26:40.479238Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:26:40.479244Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:26:40.479263Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:26:40.479269Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:26:40.479284Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:26:40.479290Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:26:40.480980Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564825542471064033:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=55108640028848;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323200480;max=18446744073709551615;plan=0;src=[6:7564825538176096258:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:40.482875Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:40.482890Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:40.482893Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:40.496027Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064105:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.496056Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.496210Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064107:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.496216Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.499082Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:40.502388Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:40.508370Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064137:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.508394Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.508543Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064140:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.508551Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.510938Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:40.513596Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:40.519867Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064170:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.519893Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.519972Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064175:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.519982Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825542471064176:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.519998Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:40.520878Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:40.527388Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825542471064179:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:40.618651Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825542471064230:2439] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [FAIL] Test command err: ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values: Client->GetCurrentPoolSize() Which is: 7 activeSessionsLimit Which is: 10 >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,0,0] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,0,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003267/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit_log.lsyjkwqc.txt 2025-10-24T16:26:30.340712Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeVariants[2,false,0,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 31587, MsgBus: 18772 2025-10-24T16:26:34.483124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825519778167232:2146];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:34.483160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f50/r3tmp/tmpzZrHdJ/pdisk_1.dat 2025-10-24T16:26:34.530483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-10-24T16:26:34.530515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:34.530772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:34.533061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:26:34.540476Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:34.540701Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825519778167114:2081] 1761323194480530 != 1761323194480533 TServer::EnableGrpc on GrpcPort 31587, node 1 2025-10-24T16:26:34.555152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:34.555168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:34.555169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:34.555213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18772 TClient is connected to server localhost:18772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:34.608453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:26:34.611532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:34.740467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:34.851858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825519778167789:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.851885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.851936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825519778167799:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.851945Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.889244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:34.914631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:34.914696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:34.914756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:34.914775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:34.914788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:34.914809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:34.914826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:34.914844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:34.914864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:34.914878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:34.914881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 
2025-10-24T16:26:34.914892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:34.914906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:34.914949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:34.914960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:34.914981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825519778167860:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:34.914986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:34.915010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:34.915036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:34.915062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:34.915087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825519778167861:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... 
474976715658:TX_KIND_SCHEMA;min=1761323201099;max=18446744073709551615;plan=0;src=[7:7564825544075238405:2149];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.099692Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825548370206079:2324];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=21229897164432;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323201099;max=18446744073709551615;plan=0;src=[7:7564825544075238405:2149];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.101965Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.101984Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.101987Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.103339Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.103355Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.103356Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:41.115106Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:41.115650Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206177:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.115880Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.116156Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206189:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.116170Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.120285Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:41.120285Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:41.128224Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206213:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.128242Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.128341Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206215:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.128355Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.130795Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:41.133701Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:41.133710Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:26:41.141009Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206250:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.141039Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.141064Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206255:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.141071Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548370206257:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.141077Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.141690Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:41.148179Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825548370206259:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:41.220768Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825548370206310:2468] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:4/0/0 HEADER:0/0/0 >> KqpScheme::AlterUser [GOOD] >> KqpScheme::AsyncReplicationCommitInterval+UseQueryService >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,100,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[2,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 12489, MsgBus: 24074 2025-10-24T16:26:34.480881Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825517233680751:2064];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:34.480902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b3e/r3tmp/tmp8q19ia/pdisk_1.dat 2025-10-24T16:26:34.547193Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:34.547292Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825517233680728:2081] 1761323194480755 != 1761323194480758 2025-10-24T16:26:34.547327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 12489, node 1 2025-10-24T16:26:34.556459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:34.556473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:34.556474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:34.556514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24074 2025-10-24T16:26:34.588757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:34.588786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:34.589766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:24074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:34.612548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:34.734533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:34.910808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825517233681407:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.910836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.910918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825517233681417:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.910928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.958119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:34.976815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:34.976815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:34.976866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:34.976881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:34.976943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:34.976943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:34.976972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:34.976979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:34.977005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:34.977007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:34.977036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:34.977037Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:34.977062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:34.977065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:34.977118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:34.977130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:34.977149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:34.977158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:34.977178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:34.977181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:34.977207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825517233681474:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:34.977216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825517233681475:2324];tablet_id=72075186224037888;process=TT ... 
et_id=72075186224037889;tx_id=281474976715658;this=89022730715376;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323201129;max=18446744073709551615;plan=0;src=[7:7564825544025343997:2144];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.130990Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.131006Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.131008Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.131189Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.131207Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:41.131221Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:41.139803Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311776:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.139827Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.139880Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311783:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.139892Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.140102Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:41.142347Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:41.142360Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:41.147734Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311812:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.147764Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.147801Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311814:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.147809Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.150640Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:41.154864Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:41.155059Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:41.159903Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311849:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.159920Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.160061Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311854:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.160071Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825548320311855:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.160109Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:41.160626Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:41.162577Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825548320311858:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:41.256347Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825548320311909:2466] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:4/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SimpleVariants[2,true,0,10,0,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,1000000,0.5] >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,1000000,0] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,1000000,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,1000,0,0.5] [GOOD] |59.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,0,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,100,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SimpleVariants[2,true,0,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 28938, MsgBus: 22744 2025-10-24T16:26:36.104379Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825525194223585:2147];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:36.104451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00125e/r3tmp/tmpNU4dw1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28938, node 1 2025-10-24T16:26:36.157168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:36.157658Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:36.157832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825525194223466:2081] 1761323196103141 != 1761323196103144 2025-10-24T16:26:36.166954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:36.166967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:36.166969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:36.167015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22744 TClient is connected to server localhost:22744 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:26:36.214503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:36.214530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:36.215448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:26:36.223483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:36.396289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:36.518873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825525194224141:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.518894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.519030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825525194224151:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.519035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.554640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:36.569795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.569865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.569916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.569943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.570055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.570070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.570085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.570098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.570111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:36.570130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:36.570145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:36.570156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:36.570169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525194224212:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:36.570461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.570482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.570543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.570566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.570588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.570610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.570633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.570670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825525194224227:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.570675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 
12133927680;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323202003;max=18446744073709551615;plan=0;src=[6:7564825546483151709:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.003939Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564825546483152283:2324];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715658;this=92012133929968;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323202003;max=18446744073709551615;plan=0;src=[6:7564825546483151709:2146];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.005844Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.005867Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.005869Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.006642Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.006657Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.006659Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:42.014595Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119680:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.014614Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.014653Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119682:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.014658Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.017572Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:42.019624Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.019834Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:42.023904Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119716:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.023925Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.023977Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119718:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.023982Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.027577Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:42.029958Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.029959Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:42.035712Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119753:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.035726Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119758:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.035738Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.035804Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825550778119761:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.035817Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.036686Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:42.042075Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825550778119760:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:42.120526Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825550778119813:2469] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,100,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,1000000,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::EmptyVariants[2,true,1024,1000,1000000,0.5] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreFullJsonVariants[10,false,0,1000,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 18987, MsgBus: 26848 2025-10-24T16:26:35.942720Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825521646158151:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:35.942769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f4e/r3tmp/tmprfVB0W/pdisk_1.dat 2025-10-24T16:26:35.987715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:35.998862Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:35.999054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825521646158046:2081] 1761323195942109 != 1761323195942112 TServer::EnableGrpc on GrpcPort 18987, node 1 2025-10-24T16:26:36.007543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:36.007552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:36.007553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:36.007591Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
TClient is connected to server localhost:26848 2025-10-24T16:26:36.051309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:36.051350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:26848 2025-10-24T16:26:36.052380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:36.073057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:26:36.272583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:36.372505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825525941126022:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.372531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.372771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825525941126032:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.372777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.415141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:36.438399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.438523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.438584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.438606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.438627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.438648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.438670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.438695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.438727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:36.438756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:36.438783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:36.438803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:36.438828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825525941126165:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:36.439446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:36.439455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:36.439469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:36.439474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:36.439495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:36.439500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:36.439511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:36.439516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:36.439525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:36.439530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:36.439536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;proces ... 
e_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329290Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329358Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329439Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329550Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { 
FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329588Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329726Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329728Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.329870Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, 
JsonDocument('{"b" : "b3", "d" : "d3", "e" : ["a", {"v" : ["c", 5]}]}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:42.335647Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825551938151643:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.335671Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825551938151648:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.335675Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.335721Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825551938151651:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.335727Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.336411Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:42.338291Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825551938151650:2397], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:42.414038Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825551938151703:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\",\"d\":\"NULL\",\"e.v\":{\"c\":\"1\",\"e\":{\"c.a\":\"2\"}}}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\",\"e\":[\"a\",{\"v\":[\"c\",\"5\"]}]}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> KqpScheme::AsyncReplicationCommitInterval+UseQueryService [GOOD] >> KqpScheme::AsyncReplicationCommitInterval-UseQueryService >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,100,0.5] [GOOD] >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,100,0.5] [GOOD] |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[2,true,0,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 21146, MsgBus: 6027 2025-10-24T16:26:36.330930Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825526374313976:2195];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:36.330982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00125d/r3tmp/tmpKsRk06/pdisk_1.dat 2025-10-24T16:26:36.396002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
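For readability, the schema setup that these KqpOlapJson tests apply before loading data (visible above only as an UpsertColumns diff carrying a SUB_COLUMNS DataAccessorConstructor) corresponds to YQL of roughly the following shape. This is a minimal sketch assembled from the EXECUTE: statements printed elsewhere in this log (the CREATE TABLE and ALTER OBJECT forms are taken from the later test blocks below); the table name, option names, and values are the ones printed in the log, and the sequence is illustrative rather than the complete test body.

    CREATE TABLE `/Root/ColumnTable` (
        Col1 Uint64 NOT NULL,
        Col2 JsonDocument,
        PRIMARY KEY (Col1)
    ) PARTITION BY HASH(Col1)
    WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1);

    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`);

    -- Option names follow the EXECUTE: ALTER OBJECT lines later in this log;
    -- the values are the ones from the UpsertColumns diff above
    -- (SparsedDetectorKff: 1000, ColumnsLimit: 0, ChunkMemoryLimit: 0, OthersAllowedFraction: 0.5).
    ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE)
        SET (ACTION=ALTER_COLUMN, NAME=Col2,
             `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`,
             `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`,
             `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`0`,
             `OTHERS_ALLOWED_FRACTION`=`0.5`);

    REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES
        (1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1", "d" : null, "e.v" : {"c" : 1, "e" : {"c.a" : 2}}}')),
        (2u, JsonDocument('{"a" : "a2"}'));
        -- rows 3 and 4 follow as printed in the EXECUTE: REPLACE INTO line above

    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT * FROM `/Root/ColumnTable` ORDER BY Col1;
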
2025-10-24T16:26:36.405906Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21146, node 1 2025-10-24T16:26:36.417840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:36.417853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:36.417855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:36.417908Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:26:36.433820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:36.433855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:36.435603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6027 TClient is connected to server localhost:6027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:36.501003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:36.509277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:36.660456Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:36.813863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825526374314484:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.813887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.814036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825526374314494:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.814043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.869181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:36.884261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.884343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.884405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.884436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.884460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.884484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.884508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.884550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.884574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:36.884597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:36.884624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:36.884657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:36.884680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825526374314555:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:36.886800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.886819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.886866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.886888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.886909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.886931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.886953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.886976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.886995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825526374314571:2324];tablet_id=72075186224037889;process=TTxIni ... 
1323202883;max=18446744073709551615;plan=0;src=[7:7564825554343018010:2149];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.885183Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825554343018386:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=17764958967728;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323202885;max=18446744073709551615;plan=0;src=[7:7564825554343018010:2149];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.887479Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.887497Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.887502Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.888927Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.888937Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:42.888940Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:42.895916Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018489:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.895936Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.896072Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018491:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.896079Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.898667Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:42.901379Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.901561Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:42.910349Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018525:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.910370Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.910506Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018527:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.910514Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.913474Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:42.917066Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:42.917132Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:26:42.925564Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018562:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.925595Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.925645Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018567:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.925658Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825554343018568:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.925723Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:42.926548Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:42.929666Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825554343018571:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:42.996310Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825554343018622:2467] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:4/0/0 HEADER:0/0/0 >> test_auditlog.py::test_dynconfig [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterVariants[1,true,1,1000,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 30760, MsgBus: 24132 2025-10-24T16:26:36.970568Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825527661462902:2073];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:36.970610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f4d/r3tmp/tmpwc5fkF/pdisk_1.dat 2025-10-24T16:26:37.026385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:37.047242Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30760, node 1 2025-10-24T16:26:37.072577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:37.072598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:37.076117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:26:37.076360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:37.076362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:37.076363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:37.076400Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24132 TClient is connected to server localhost:24132 WaitRootIsUp 'Root'... 
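The predicate check in the DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst block above is easier to follow with its EXECUTE: statements laid out separately. This is a sketch assembled from the statements printed in this log; of the four inserted rows, only Col1 = 3 satisfies "a" = "value_a", "b" = "value_b", and Col1 > 1, which matches the single-row COMPARE/OUTPUT result above.

    REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES
        (1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"),
        (2u, JsonDocument('{"a" : "value_a"}'), "value1"),
        (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"),
        (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4");

    PRAGMA OptimizeSimpleILIKE;
    PRAGMA AnsiLike;
    SELECT *
    FROM `/Root/ColumnTable`
    WHERE JSON_VALUE(Col2, "$.a") = "value_a"
      AND JSON_VALUE(Col2, "$.b") = "value_b"
      AND Col1 > 1
    ORDER BY Col1;
    -- Expected (per COMPARE/OUTPUT above):
    -- [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]]
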
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:37.184841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:37.196235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:26:37.291475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:37.449376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825531956430825:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.449424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.449517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825531956430835:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.449524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.492776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:37.506922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:37.507004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:37.507064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:37.507087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:37.507113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:37.507135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:37.507158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:37.507179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:37.507214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:37.507243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:37.507264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:37.507286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:37.507308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825531956430888:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:37.507977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:37.507991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:37.508011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:37.508017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:37.508041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:37.508046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:37.508059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:37.508064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:37.508071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:37.508075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:37.508081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSche ... 
izer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:43.266080Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:26:43.266086Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:26:43.266091Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:26:43.266096Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:26:43.266109Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:26:43.266113Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:26:43.266123Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:26:43.266126Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:26:43.267938Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825556959884263:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=127184639957824;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323203267;max=18446744073709551615;plan=0;src=[7:7564825552664916595:2148];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:43.269554Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:43.269574Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:43.269577Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:43.276659Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884335:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.276675Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.276807Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884337:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.276814Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.279036Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:43.283321Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`100`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:43.290301Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884367:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.290327Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.290507Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884370:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.290535Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.293460Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:43.296951Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:43.306120Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884400:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.306161Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.310395Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884405:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.310422Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825556959884406:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.310503Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.311455Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:43.313570Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825556959884409:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:43.385293Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825556959884460:2440] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.b") = "b3" AND JSON_VALUE(Col2, "$.d") = "d3" ORDER BY Col1; COMPARE: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] OUTPUT: [[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]]] INDEX:2/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\",\"b\":\"b1\",\"c\":\"c1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\",\"d\":\"d3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4asdsasdaa\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::QuotedFilterVariants[1,false,1,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 30364, MsgBus: 28389 2025-10-24T16:26:35.765841Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825524464343292:2147];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:35.765895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00125f/r3tmp/tmp1Ft2oS/pdisk_1.dat 2025-10-24T16:26:35.812059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:35.812692Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:35.812894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825524464343181:2081] 1761323195764743 != 1761323195764746 TServer::EnableGrpc on GrpcPort 30364, node 1 2025-10-24T16:26:35.821698Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:35.821719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:35.821721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:35.821765Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28389 TClient is connected to server localhost:28389 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:26:35.869473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:35.869514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:35.870596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:35.876754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:26:36.037529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:36.145538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825528759311151:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.145564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.145680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825528759311161:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.145689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.201300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:36.214360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.214444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.214510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.214539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.214572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.214590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.214615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.214635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.214656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:36.214719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:36.214743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:36.214768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:36.214790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528759311214:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:36.215468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:36.215478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:36.215498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:36.215503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:36.215526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:36.215530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:36.215544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:36.215549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:36.215558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:36.215564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:36.215570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process ... 
CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:43.107559Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:26:43.107565Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:26:43.107571Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:26:43.107576Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:26:43.107590Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:26:43.107595Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:26:43.107603Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:26:43.107608Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:26:43.109931Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825555929439223:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976710658;this=126221503478528;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323203109;max=18446744073709551615;plan=0;src=[7:7564825551634471580:2158];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:43.123625Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:43.123657Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:43.123661Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:43.142868Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439295:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.142902Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.143029Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439300:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.143059Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.143350Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:43.149397Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:43.168392Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:43.168988Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439327:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.169102Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.169244Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439339:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.169250Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.172339Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a.b.c" : "a1", "b.c.d" : "b1", "c.d.e" : "c1"}')), (2u, JsonDocument('{"a.b.c" : "a2"}')), (3u, JsonDocument('{"b.c.d" : "b3", "d.e.f" : "d3"}')), (4u, JsonDocument('{"b.c.d" : "b4asdsasdaa", "a.b.c" : "a4"}')) 2025-10-24T16:26:43.179218Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439360:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.179239Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439365:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.179248Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.179420Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825555929439368:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.179435Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:43.180134Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:43.189303Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825555929439367:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:43.273297Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825555929439420:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.\"a.b.c\"") = "a2" ORDER BY Col1; COMPARE: [[2u;["{\"a.b.c\":\"a2\"}"]]] OUTPUT: [[2u;["{\"a.b.c\":\"a2\"}"]]] INDEX:1/0/0 HEADER:0/0/0 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT ChunkDetails FROM `/Root/ColumnTable/.sys/primary_index_stats` ORDER BY InternalEntityId, ChunkIdx; OUTPUT: [[[""]];[["{\"others\":{\"accessor\":[1,1,1],\"size\":[6,2,2],\"key_names\":[\"\\\"a.b.c\\\"\",\"\\\"c.d.e\\\"\",\"\\\"d.e.f\\\"\"],\"records\":[3,1,1]},\"columns\":{\"accessor\":[1],\"size\":[15],\"key_names\":[\"\\\"b.c.d\\\"\"],\"records\":[3]},\"o_size\":672,\"h_size\":760,\"c_size\":280}"]]] INDEX:0/0/0 HEADER:0/0/0 |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[1,true,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,0,0.5] >> KqpScheme::AsyncReplicationCommitInterval-UseQueryService [GOOD] >> KqpScheme::AsyncReplicationConnectionString |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,1000000,0] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,100,0] |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::EmptyVariants[2,true,1024,1000,1000000,0.5] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,0,0,0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/003236/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit_log.1yhq9fap.txt 2025-10-24T16:26:38.375329Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n 
tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> KqpScheme::AsyncReplicationConnectionString [GOOD] >> KqpScheme::AsyncReplicationConnectionStringWithSsl >> KqpOlapAggregations::Aggregation [GOOD] >> KqpOlapAggregations::AggregationCountPushdown >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,100,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,0,0.5] [GOOD] |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::FilterVariantsCount[1,false,0,0,0,0] [GOOD] >> KqpOlapJson::FilterVariantsCount[1,false,0,0,0,0.5] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,0,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 2099, MsgBus: 9196 2025-10-24T16:26:34.636515Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825516882593529:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:34.636604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001262/r3tmp/tmpeKhhDl/pdisk_1.dat 2025-10-24T16:26:34.683576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:34.683659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825516882593410:2081] 1761323194635599 != 1761323194635602 2025-10-24T16:26:34.684063Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2099, node 1 
2025-10-24T16:26:34.692298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:34.692311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:34.692313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:34.692344Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9196 TClient is connected to server localhost:9196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:26:34.745185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:34.745227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:34.746143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:34.751538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:26:34.926579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:34.986015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825516882594091:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.986043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.986112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825516882594101:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:34.986123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.018603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:35.029149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:35.029199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:35.029250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:35.029279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:35.029294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:35.029309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:35.029325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:35.029350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:35.029368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:35.029396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:35.029417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:35.029440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:35.029466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825521177561450:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:35.030052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:35.030088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:35.030115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:35.030130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:35.030153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:35.030160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:35.030172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:35.030178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:35.030186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:35.030192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:35.030198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxU ... 
son=equal_to_use;from=2;to=3; 2025-10-24T16:26:45.989479Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:45.989483Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:45.989486Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:45.989537Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564825559834103088:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205040;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:45.989546Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564825559834103088:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205068;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:45.989579Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205040;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:45.989587Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[6:7564825559834103088:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323205215;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761323205215 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:45.989588Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205068;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:45.989605Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205215;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:26:45.989653Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825559834103097:2331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323205236;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761323205236 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:45.989764Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825559834103087:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:45.989768Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825559834103087:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:45.989804Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:45.989807Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:45.989810Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:45.989832Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825559834103087:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205040;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:45.989839Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[6:7564825559834103087:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205068;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:45.989880Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825559834103087:2325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323205215;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761323205215 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:45.989891Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205040;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:45.989898Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205068;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:45.989902Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323205215;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:26:45.989940Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[6:7564825559834103098:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323205236;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761323205236 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } 
NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,10,0,0.5] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> KqpScheme::AsyncReplicationConnectionStringWithSsl [GOOD] >> KqpScheme::AsyncReplicationEndpointAndDatabase |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,1000000,0] >> KqpOlapAggregations::AggregationCountPushdown [GOOD] >> KqpOlapAggregations::AggregationCountGroupByPushdown >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> KqpOlapJson::FilterVariantsCount[1,false,0,0,0,0.5] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,1000000,0.5] >> KqpScheme::AsyncReplicationEndpointAndDatabase [GOOD] >> KqpScheme::AlterTransfer_QueryService >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::FilterVariantsCount[1,false,0,0,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 23160, MsgBus: 8356 2025-10-24T16:26:35.243237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825524220247276:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:35.243257Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001260/r3tmp/tmpMVU1ns/pdisk_1.dat 2025-10-24T16:26:35.275027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:35.288595Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:35.288785Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825524220247245:2081] 1761323195242952 != 1761323195242955 TServer::EnableGrpc on GrpcPort 23160, node 1 2025-10-24T16:26:35.295634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:35.295645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:35.295647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:35.295683Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8356 TClient is connected to server localhost:8356 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:35.353559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:35.353590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:35.354391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:26:35.354608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:26:35.553334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:35.644481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825524220247920:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.644510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.644579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825524220247930:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.644588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:35.680689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:35.695732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:35.695794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:35.695885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:35.695907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:35.695931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:35.695952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:35.695971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:35.695995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:35.696017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:35.696038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:35.696063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:35.696082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:35.696102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825524220247993:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:35.696798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825524220247994:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:35.696800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:35.696808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:35.696817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:35.696821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:35.696833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:35.696836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:35.696844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:35.696844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825524220247994:2324];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:35.696847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:35.696852Z node 1 :TX_COLUMNSHARD WA ... 
WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:26:47.282722Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:26:47.282727Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:47.282732Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:26:47.282737Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:26:47.282743Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:26:47.282748Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:26:47.282762Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:26:47.282767Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:26:47.282777Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:26:47.282781Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:26:47.284325Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825573356586206:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=19865210548176;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323207284;max=18446744073709551615;plan=0;src=[7:7564825569061618534:2145];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:47.286490Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:47.286506Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:26:47.286509Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:26:47.295389Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586278:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.295413Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.295480Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586280:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.295492Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.298914Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:47.301468Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`0`, `SPARSED_DETECTOR_KFF`=`0`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:26:47.307803Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586310:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.307843Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.307967Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586312:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.307978Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.311187Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:26:47.317317Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 0 ColumnsLimit: 0 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1", "b" : "b1", "c" : "c1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3", "d" : "d3"}')), (4u, JsonDocument('{"b" : "b4asdsasdaa", "a" : "a4"}')) 2025-10-24T16:26:47.323050Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586343:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.323076Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.323124Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586348:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.323147Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825573356586349:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.323168Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:47.324122Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:47.330527Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825573356586352:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:26:47.422620Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825573356586403:2438] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT COUNT(*) FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "a2"; COMPARE: [[1u]] OUTPUT: [[1u]] INDEX:1/0/0 HEADER:0/0/0 |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> KqpOlapAggregations::AggregationCountGroupByPushdown [GOOD] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols >> test_auditlog.py::test_single_dml_query_logged[select] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,0,1000000,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,0,0] >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,10,0,0.5] [GOOD] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::SwitchAccessorCompactionVariants[10,false,1,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 6573, MsgBus: 7528 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00125c/r3tmp/tmpLTaRB2/pdisk_1.dat 2025-10-24T16:26:36.999604Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:36.999661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:26:36.999668Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6573, node 1 2025-10-24T16:26:37.012020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:37.012033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:37.012035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:37.012078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7528 2025-10-24T16:26:37.047124Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:37.047160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:37.048104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:37.092876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:37.100221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:26:37.264388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:37.371425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825531524736197:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.371449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.371546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825531524736207:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.371556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.413389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:37.444526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:37.444596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:37.444675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:37.444700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:37.444722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:37.444750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:37.444777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:37.444798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:37.444824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:37.444845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:37.444867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:37.444892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:37.444911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825531524736347:2329];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:37.448780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:37.448819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:37.448866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:37.448888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:37.448906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:37.448926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:37.448942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:37.448960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:37.449021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825531524736327:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:37.449038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:756482553152473 ... 
og.cpp:841: tablet_id=72075186224037888;self_id=[6:7564825574281852762:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207959;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:26:48.744692Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[6:7564825574281852762:2332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323207973;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761323207973 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:48.744814Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825574281852695:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:48.744823Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825574281852695:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:48.744872Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:48.744879Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:48.744883Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:48.744892Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825574281852695:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207791;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:48.744900Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[6:7564825574281852695:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207812;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:48.744953Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[6:7564825574281852695:2328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323207959;tx_id=281474976710664;;new_schema=Id: 0 SinceStep: 1761323207959 SinceTxId: 281474976710664 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 3 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:26:48.744956Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207791;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:48.744964Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207812;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:48.744977Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207959;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:26:48.745025Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[6:7564825574281852686:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323207973;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761323207973 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } 
NextColumnFamilyId: 1 } ; 2025-10-24T16:26:48.745224Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:26:48.745233Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:26:48.745237Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:26:48.745325Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207791;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:26:48.745337Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207812;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:26:48.745341Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323207959;tx_id=281474976710664;;is_diff=1;version=3; 2025-10-24T16:26:48.745384Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[6:7564825574281852691:2326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323207973;tx_id=281474976710665;;new_schema=Id: 0 SinceStep: 1761323207973 SinceTxId: 281474976710665 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 2 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: 
[[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"a1\"}"]];[2u;["{\"a\":\"a2\"}"]];[3u;["{\"b\":\"b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]];[10u;#];[11u;["{\"a\":\"1a1\"}"]];[12u;["{\"a\":\"1a2\"}"]];[13u;["{\"b\":\"1b3\"}"]];[14u;["{\"a\":\"a4\",\"b\":\"1b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,0,0.5] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] |59.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |59.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,0,0.5] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> YdbSdkSessionsPool::PeriodicTask/1 >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,100,0.5] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,100,0.5] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/slow/unittest >> TxUsage::Write_Only_Big_Messages_In_Wide_Transactions_Table [GOOD] Test command err: 2025-10-24T16:20:40.908032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564823998920885803:2178];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:40.908081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:20:40.919152Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00133a/r3tmp/tmpocIQ18/pdisk_1.dat 2025-10-24T16:20:41.005158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:20:41.006204Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:20:41.006431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564823998920885662:2081] 1761322840906532 != 1761322840906535 TServer::EnableGrpc on GrpcPort 28950, node 1 2025-10-24T16:20:41.021106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:20:41.021138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:20:41.022203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:20:41.029799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00133a/r3tmp/yandexqvMn9G.tmp 2025-10-24T16:20:41.029808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00133a/r3tmp/yandexqvMn9G.tmp 2025-10-24T16:20:41.029893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00133a/r3tmp/yandexqvMn9G.tmp 2025-10-24T16:20:41.029930Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:20:41.044568Z INFO: TTestServer started on Port 27066 GrpcPort 28950 TClient is connected to server localhost:27066 PQClient connected to localhost:28950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:20:41.113536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-10-24T16:20:41.125744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... waiting... 2025-10-24T16:20:41.217803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:20:41.463104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824003215853793:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.463126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824003215853766:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.463188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.463878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:20:41.464096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824003215853825:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.464115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.464140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564824003215853829:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.464149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:20:41.466144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564824003215853796:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:20:41.508150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.515947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:20:41.530011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-10-24T16:20:41.566611Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564824003215854111:2595] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7564824003215854138:2617] 2025-10-24T16:20:41.908622Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:20:45.907953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564823998920885803:2178];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:20:45.907995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-10-24T16:20:46.776419Z :Write_Only_Big_Messages_In_Wide_Transactions_Table INFO: TTopicSdkTestSetup started 2025-10-24T16:20:46.779056Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-10-24T16:20:46.782537Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037893][] pipe [1:7564824024690690872:2726] connected; active server actors: 1 2025-10-24T16:20:46.782606Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1519: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-10-24T16:20:46.782739Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:942: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-10-24T16:20:46.782770Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:140: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-10-24T16:20:46.783310Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:20:46.783480Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:20:46.783521Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-10-24T16:20:46.783532Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72075186224037892] disable metering: reason# ... cts: 0 2025-10-24T16:23:28.312821Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715743}, 100033}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312822Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715743}, 100033}][StateIdle] Try persist 2025-10-24T16:23:28.312826Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715742}, 100032}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312828Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715742}, 100032}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312828Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715742}, 100032}][StateIdle] Try persist 2025-10-24T16:23:28.312833Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715784}, 100059}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312834Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715784}, 100059}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312835Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715784}, 100059}][StateIdle] Try persist 2025-10-24T16:23:28.312840Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715824}, 100081}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312843Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715824}, 100081}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312844Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715824}, 100081}][StateIdle] Try persist 2025-10-24T16:23:28.312849Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715762}, 100043}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312850Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715762}, 100043}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312853Z node 1 
:PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715762}, 100043}][StateIdle] Try persist 2025-10-24T16:23:28.312858Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715798}, 100067}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312859Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715798}, 100067}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312860Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715798}, 100067}][StateIdle] Try persist 2025-10-24T16:23:28.312864Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715818}, 100077}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312865Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715818}, 100077}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312869Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715818}, 100077}][StateIdle] Try persist 2025-10-24T16:23:28.312873Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715812}, 100074}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312874Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715812}, 100074}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312875Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715812}, 100074}][StateIdle] Try persist 2025-10-24T16:23:28.312879Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715777}, 100054}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312880Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715777}, 100054}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312885Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715777}, 100054}][StateIdle] Try persist 2025-10-24T16:23:28.312890Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037913][Partition][{6, {1, 281474976715876}, 100092}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312891Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037913][Partition][{6, {1, 281474976715876}, 100092}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312892Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037913][Partition][{6, {1, 281474976715876}, 100092}][StateIdle] Try persist 2025-10-24T16:23:28.312956Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][{8, {1, 281474976715825}, 100082}][StateIdle] Try persist 2025-10-24T16:23:28.312962Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037895][Partition][{8, {1, 281474976715840}, 100088}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312963Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037895][Partition][{8, {1, 281474976715840}, 100088}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 
2025-10-24T16:23:28.312964Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][{8, {1, 281474976715840}, 100088}][StateIdle] Try persist 2025-10-24T16:23:28.312969Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037895][Partition][{8, {1, 281474976715821}, 100079}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312970Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037903][Partition][{10, {1, 281474976715900}, 100095}][StateIdle] No data for blobs compaction 2025-10-24T16:23:28.312971Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037895][Partition][{8, {1, 281474976715821}, 100079}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312972Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][{8, {1, 281474976715821}, 100079}][StateIdle] Try persist 2025-10-24T16:23:28.312977Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037903][Partition][{10, {1, 281474976715888}, 100094}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312978Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037895][Partition][{8, {1, 281474976715824}, 100081}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312979Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037903][Partition][{10, {1, 281474976715888}, 100094}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312979Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037895][Partition][{8, {1, 281474976715824}, 100081}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312980Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037903][Partition][{10, {1, 281474976715888}, 100094}][StateIdle] Try persist 2025-10-24T16:23:28.312980Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][{8, {1, 281474976715824}, 100081}][StateIdle] Try persist 2025-10-24T16:23:28.312985Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037903][Partition][{10, {1, 281474976715759}, 100041}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312986Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037903][Partition][{10, {1, 281474976715759}, 100041}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312987Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037903][Partition][{10, {1, 281474976715759}, 100041}][StateIdle] Try persist 2025-10-24T16:23:28.312988Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037895][Partition][{8, {1, 281474976715763}, 100044}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312989Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037895][Partition][{8, {1, 281474976715763}, 100044}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312990Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][{8, {1, 281474976715763}, 100044}][StateIdle] Try persist 2025-10-24T16:23:28.312992Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037903][Partition][{10, {1, 281474976715755}, 100038}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312993Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037903][Partition][{10, {1, 281474976715755}, 
100038}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312995Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037903][Partition][{10, {1, 281474976715755}, 100038}][StateIdle] Try persist 2025-10-24T16:23:28.312995Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037895][Partition][{8, {1, 281474976715770}, 100049}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.312997Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037895][Partition][{8, {1, 281474976715770}, 100049}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.312998Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037895][Partition][{8, {1, 281474976715770}, 100049}][StateIdle] Try persist 2025-10-24T16:23:28.312999Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037903][Partition][{10, {1, 281474976715753}, 100037}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.313004Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037903][Partition][{10, {1, 281474976715753}, 100037}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.313005Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037903][Partition][{10, {1, 281474976715753}, 100037}][StateIdle] Try persist 2025-10-24T16:23:28.313009Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037903][Partition][{10, {1, 281474976715742}, 100032}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.313010Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037903][Partition][{10, {1, 281474976715742}, 100032}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:23:28.313011Z node 1 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037903][Partition][{10, {1, 281474976715742}, 100032}][StateIdle] Try persist 2025-10-24T16:23:28.313020Z node 1 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037903][Partition][{10, {1, 281474976715689}, 100008}][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:23:28.313021Z node 1 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037903][Partition][{10, {1, 281474976715689}, 100008}][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariants[10,false,0,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 9914, MsgBus: 2901 2025-10-24T16:26:45.214934Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825563825055312:2194];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:45.215030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00125b/r3tmp/tmpfYvcuF/pdisk_1.dat 2025-10-24T16:26:45.297878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:45.318364Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:45.328904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:45.328977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9914, node 1 2025-10-24T16:26:45.330227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:26:45.342704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:45.342717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:45.342719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:45.342770Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2901 TClient is connected to server localhost:2901 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:45.406641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10); 2025-10-24T16:26:45.572102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:45.684630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825563825055816:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:45.684663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:45.684786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825563825055826:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:45.684796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:45.744078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:26:45.770006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:45.770082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:45.770148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:45.770177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:45.770202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:45.770227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:45.770254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:45.770279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:45.770301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:45.770329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:45.770354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:45.770381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:45.770405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825563825055925:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:45.772797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:45.772853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:45.772903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:45.772930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:45.772953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:45.772979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:45.773006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:45.773025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:45.773050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825563825055933:2329];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:45.773074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075 ... 
Id: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.929458Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.929654Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.929666Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.929852Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.929927Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 
2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.930012Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.930087Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.930161Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:26:51.930328Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 0 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } 
DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4"}'), "value4") 2025-10-24T16:26:51.935465Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825592695067835:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:51.935488Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:51.935507Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825592695067840:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:51.935530Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825592695067842:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:51.935539Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:51.936185Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:26:51.942483Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825592695067843:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:26:52.001889Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825596990035191:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND Col3 = "value2" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:6/0/0 HEADER:0/0/0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00320e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_dynconfig/audit_log.jpvak9h4.txt 2025-10-24T16:26:44.133551Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> YdbSdkSessions::TestMultipleSessions >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> YdbSdkSessions::TestSessionPool [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::MultiThreadSync >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::MultiThreadSync [GOOD] |59.3%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/services/config/ut/ydb-services-config-ut >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |59.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.3%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is failing right now |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> 
YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> KqpScheme::AlterTransfer_QueryService [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> KqpOlapAggregations::AggregationAndFilterPushdownOnDiffCols [GOOD] >> KqpOlapAggregations::Aggregation_Avg >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:548: Enable after accepting a pull request with merging configs |59.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> 
YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:588: Enable after accepting a pull request with merging configs |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |59.4%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:200: Test is failing right now |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> 
test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> RetryPolicy::TWriteSession_SeqNoShift >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> KqpOlapAggregations::Aggregation_Avg [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,0,0,0,0] >> KqpSchemeFulltext::CreateTableWithIndex+UseQueryClient >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,0,100,0] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,0,0] >> TResourcePoolTestReboots::SimpleDropResourcePoolWithReboots >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/0031b9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit_log.e4f_63zp.txt 2025-10-24T16:27:02.316725Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:27:02.316712Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-10-24T16:27:02.302983Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,0,0,0,0] [GOOD] >> KqpSchemeFulltext::CreateTableWithIndex+UseQueryClient [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,0,100,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,0,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,0,0,0,0.5] >> KqpSchemeFulltext::CreateTableWithIndex-UseQueryClient >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,0,1000000,0] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,100,0] >> KqpSchemeFulltext::CreateTableWithIndex-UseQueryClient [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,true,0,0,0,0.5] [GOOD] >> KqpSchemeFulltext::AlterTableWithIndexNoFeatureFlag+UseQueryClient >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> 
KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,100,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,0,0] >> KqpSchemeFulltext::AlterTableWithIndexNoFeatureFlag+UseQueryClient [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,100,0.5] [GOOD] >> KqpSchemeFulltext::AlterTableWithIndexNoFeatureFlag-UseQueryClient >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,0,0] [GOOD] |59.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code 
{}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapAggregations::Aggregation_Avg [GOOD] Test command err: Trying to start YDB, gRPC: 25959, MsgBus: 27804 2025-10-24T16:26:36.084737Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825528711980306:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:26:36.084759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b35/r3tmp/tmp5IE9vc/pdisk_1.dat 2025-10-24T16:26:36.123172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:26:36.137596Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:36.137830Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825528711980278:2081] 1761323196084492 != 1761323196084495 TServer::EnableGrpc on GrpcPort 25959, node 1 2025-10-24T16:26:36.150031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:36.150053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:36.150055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:36.150103Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27804 2025-10-24T16:26:36.193492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:36.193542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:36.194654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27804 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:36.248862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:26:36.252356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:26:36.276554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-10-24T16:26:36.293938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:26:36.293989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:26:36.294045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:26:36.294077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:26:36.294164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:26:36.294190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:26:36.294216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:26:36.294234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:26:36.294254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:26:36.294277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:26:36.294295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:26:36.294314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:26:36.294331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825528711980954:2301];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:26:36.294968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:26:36.294986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:26:36.295001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:26:36.295006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:26:36.295028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:26:36.295037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:26:36.295050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:26:36.295055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:26:36.295064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:26:36.295072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:26:36.295079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-10-24T16:26:36.295084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-10-24T16:26:36.295109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-10-24T16:26:36.295119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-10-24T16:26:36.295138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:26:36.295147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:26:36.295153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;descripti ... ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 3550436 UpdateTimeMs: 1761323228240 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:08.240688Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. 
Waiting for: CA [10:1769:3031], 2025-10-24T16:27:08.537706Z node 11 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[11:1306:2388];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:263;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-10-24T16:27:08.888112Z node 10 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 65538 2025-10-24T16:27:08.888217Z node 10 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Send stats to executor actor [10:1763:2927] TaskId: 66 Stats: CpuTimeUs: 77979 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 4198047 UpdateTimeMs: 1761323228888 } MaxMemoryUsage: 1048576 2025-10-24T16:27:08.888280Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [10:1769:3031], task: 66, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 77979 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 4198047 UpdateTimeMs: 1761323228888 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:08.888303Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [10:1769:3031], 2025-10-24T16:27:09.186466Z node 11 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[11:1306:2388];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:263;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-10-24T16:27:09.545724Z node 10 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 65538 2025-10-24T16:27:09.545827Z node 10 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Send stats to executor actor [10:1763:2927] TaskId: 66 Stats: CpuTimeUs: 78123 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 4855649 UpdateTimeMs: 1761323229545 } MaxMemoryUsage: 1048576 2025-10-24T16:27:09.545876Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [10:1769:3031], task: 66, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 78123 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 4855649 UpdateTimeMs: 1761323229545 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:09.545897Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [10:1769:3031], 2025-10-24T16:27:09.841945Z node 11 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[11:1306:2388];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:263;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-10-24T16:27:10.274002Z node 10 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 65538 2025-10-24T16:27:10.274098Z node 10 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Send stats to executor actor [10:1763:2927] TaskId: 66 Stats: CpuTimeUs: 78254 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 5583927 UpdateTimeMs: 1761323230274 } MaxMemoryUsage: 1048576 2025-10-24T16:27:10.274143Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [10:1769:3031], task: 66, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 78254 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 5583927 UpdateTimeMs: 1761323230274 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:10.274162Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [10:1769:3031], 2025-10-24T16:27:10.621005Z node 11 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[11:1306:2388];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:263;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-10-24T16:27:10.999778Z node 10 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 65538 2025-10-24T16:27:10.999889Z node 10 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2021: SelfId: [10:1769:3031], TxId: 281474976710662, task: 66. Ctx: { CheckpointId : . TraceId : 01k8bgq2yz2hefzmc52q6dy4xp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Send stats to executor actor [10:1763:2927] TaskId: 66 Stats: CpuTimeUs: 78379 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 6309701 UpdateTimeMs: 1761323230999 } MaxMemoryUsage: 1048576 2025-10-24T16:27:10.999936Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [10:1763:2927] TxId: 281474976710662. 
Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [10:1769:3031], task: 66, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 78379 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 77039 FinishTimeMs: 1761323224690 InputRows: 1 InputBytes: 15 OutputRows: 1 OutputBytes: 15 ResultRows: 1 ResultBytes: 15 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 77001 HostName: "ghrun-aakgzge3ui" NodeId: 10 StartTimeMs: 1761323224690 CreateTimeMs: 1761323224576 CurrentWaitOutputTimeUs: 6309701 UpdateTimeMs: 1761323230999 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:10.999957Z node 10 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [10:1763:2927] TxId: 281474976710662. Ctx: { TraceId: 01k8bgq2yz2hefzmc52q6dy4xp, Database: , SessionId: ydb://session/3?node_id=10&id=YWIxMTkwNTAtZTJiZDdmOWMtNWE0ZGFiZC04ZmMzNTA4MQ==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [10:1769:3031], 2025-10-24T16:27:11.295591Z node 11 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[11:1306:2388];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:263;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,1000000,0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,100,0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpScheme::AlterTransfer_QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9223, MsgBus: 18889 2025-10-24T16:26:34.713226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:26:34.750740Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:26:34.750822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:26:34.750832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0008b5/r3tmp/tmpniEX6n/pdisk_1.dat 2025-10-24T16:26:34.825217Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:26:34.826236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:26:34.826275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:26:34.826391Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323194275141 != 1761323194275144 2025-10-24T16:26:34.857481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9223, node 1 2025-10-24T16:26:34.921200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:26:34.921220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:26:34.921225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:26:34.921323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18889 TClient is connected to server localhost:18889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:26:34.970673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:26:35.003720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:26:35.098657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:26:35.273969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:26:35.380816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:26:35.525740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:26:35.725469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:26:36.029192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1706:3311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.029235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.029321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1779:3330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.029327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:36.033415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:36.187613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:36.413542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:36.611725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:36.844777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:37.048045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:37.343341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:37.602751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:26:37.936474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2592:3971], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.936627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.936789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2596:3975], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:26:37.936806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:2 ... meshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976710694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-10-24T16:26:59.565477Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:176: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710694 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.565515Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710694 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.565614Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-10-24T16:26:59.576976Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-10-24T16:26:59.577486Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:176: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710695 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" PasswordSecretName: "password_secret_name" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.577526Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710695 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "user" PasswordSecretName: "password_secret_name" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| 
id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.577633Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-10-24T16:26:59.577725Z node 8 :REPLICATION_CONTROLLER TRACE: secret_resolver.cpp:31: [SecretResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/transfer TableId: [72057594046644480:20:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTransfer DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-10-24T16:26:59.577819Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:276: [controller 72075186224037930] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:password_secret_name } 2025-10-24T16:26:59.577831Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:21: [controller 72075186224037930][TxResolveSecretResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvResolveSecretResult { ReplicationId: 1 Success: 0 Error: No such secret: USId:root@builtin:password_secret_name } 2025-10-24T16:26:59.577835Z node 8 :REPLICATION_CONTROLLER ERROR: tx_resolve_secret_result.cpp:45: [controller 72075186224037930][TxResolveSecretResult] Resolve secret error: rid# 1, error# No such secret: USId:root@builtin:password_secret_name 2025-10-24T16:26:59.577895Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_resolve_secret_result.cpp:59: [controller 72075186224037930][TxResolveSecretResult] Complete 2025-10-24T16:26:59.582859Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976710696:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-10-24T16:26:59.583257Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:176: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710696 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.583293Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710696 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: 
"$__ydb_transfer_lambda = ($x) -> { RETURN <| id:$x._offset |> };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.583396Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-10-24T16:26:59.591714Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976710697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-10-24T16:26:59.592198Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:176: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710697 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.592240Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710697 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.592293Z node 8 :REPLICATION_CONTROLLER NOTICE: tx_alter_replication.cpp:128: [controller 72075186224037930][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 20] 2025-10-24T16:26:59.592426Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-10-24T16:26:59.592486Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:231: [controller 72075186224037930] Handle NKikimr::NReplication::NController::TEvPrivate::TEvRequestCreateStream 2025-10-24T16:26:59.605554Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTransfer, opId: 281474976710698:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp:528) 2025-10-24T16:26:59.606114Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:176: [controller 72075186224037930] Handle NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710698 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" 
DirectoryPath: "/Root" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.606169Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:22: [controller 72075186224037930][TxAlterReplication] Execute: NKikimrReplication.TEvAlterReplication PathId { OwnerId: 72057594046644480 LocalId: 20 } OperationId { TxId: 281474976710698 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:2135" Database: "/Root" StaticCredentials { User: "new_user" Password: "***" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/Root/topic" DstPath: "/Root/table" TransformLambda: "$__ydb_transfer_lambda = ($x) -> {\n RETURN CAST($x as String);\n };\n" DirectoryPath: "/Root" } RunAsUser: "root@builtin" } } 2025-10-24T16:26:59.606221Z node 8 :REPLICATION_CONTROLLER NOTICE: tx_alter_replication.cpp:128: [controller 72075186224037930][TxAlterReplication] Alter replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 20] 2025-10-24T16:26:59.606303Z node 8 :REPLICATION_CONTROLLER DEBUG: tx_alter_replication.cpp:137: [controller 72075186224037930][TxAlterReplication] Complete 2025-10-24T16:26:59.606375Z node 8 :REPLICATION_CONTROLLER TRACE: controller.cpp:231: [controller 72075186224037930] Handle NKikimr::NReplication::NController::TEvPrivate::TEvRequestCreateStream >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,100,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,100,0.5] |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,1000000,0] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,1000000,0.5] |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |59.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] |59.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |59.7%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |59.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |59.7%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |59.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> KqpSchemeFulltext::AlterTableWithIndexNoFeatureFlag-UseQueryClient [GOOD] >> KqpSchemeFulltext::CreateTableWithIndexCaseInsensitive+UseQueryClient |59.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |59.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,0,1000000,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,0,1000000,0.5] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,100,0.5] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,1000000,0] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,0,1000000,0.5] [GOOD] >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,10,0,0.5] |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/py3test >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,1000000,0] [GOOD] >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,1000000,0.5] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,10,0,0.5] [GOOD] >> KqpSchemeFulltext::CreateTableWithIndexCaseInsensitive+UseQueryClient [GOOD] >> KqpSchemeFulltext::CreateTableWithIndexCaseInsensitive-UseQueryClient |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::RestoreJsonArrayVariants[2,true,1,10,0,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 63901, MsgBus: 9981 2025-10-24T16:27:12.973145Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825681324210315:2145];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:12.973325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000b03/r3tmp/tmpOFLDHg/pdisk_1.dat 2025-10-24T16:27:12.999727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:13.009525Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:13.009688Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825681324210199:2081] 1761323232972524 != 1761323232972527 TServer::EnableGrpc on GrpcPort 63901, node 1 2025-10-24T16:27:13.014827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:13.014843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:13.014845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:13.014873Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9981 TClient is connected to server localhost:9981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:13.076532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:13.076554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:13.077640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:13.082693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:27:13.259976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:13.268824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825685619178175:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.268843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.268888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825685619178185:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.268898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.296918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:27:13.309342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:27:13.309374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:27:13.309387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:27:13.309408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:27:13.309414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:27:13.309435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:27:13.309437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:27:13.309449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:27:13.309462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:27:13.309462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:27:13.309478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:27:13.309478Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:27:13.309496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:27:13.309496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:27:13.309525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:27:13.309530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:27:13.309542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:27:13.309543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:27:13.309557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:27:13.309558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:27:13.309574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825685619178241:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:27:13.309574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825685619178247:2324];tablet_id=72075186224037888;process=TTxIn ... 
86224037888;tx_id=281474976710658;this=124718262447808;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323237244;max=18446744073709551615;plan=0;src=[6:7564825697758236935:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.244239Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[6:7564825702053204607:2323];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976710658;this=124718262448160;method=TTxController::StartProposeOnExecute;tx_info=281474976710658:TX_KIND_SCHEMA;min=1761323237244;max=18446744073709551615;plan=0;src=[6:7564825697758236935:2146];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.245950Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.245964Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.245970Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.245973Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.245976Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.245978Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:27:17.251804Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204709:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.251825Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.251894Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204711:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.251905Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.254213Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:27:17.256602Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:27:17.256602Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_EXTRACTOR_CLASS_NAME`=`JSON_SCANNER`, `SCAN_FIRST_LEVEL_ONLY`=`false`, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`true`, `COLUMNS_LIMIT`=`1`, `SPARSED_DETECTOR_KFF`=`10`, `MEM_LIMIT_CHUNK`=`0`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:27:17.260694Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204745:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.260724Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.260775Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204747:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.260784Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.263480Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:27:17.267632Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; 2025-10-24T16:27:17.267633Z node 6 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1 ChunkMemoryLimit: 0 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('["a", {"v" : 4}, 1,2,3,4,5,6,7,8,9,10,11,12]')) 2025-10-24T16:27:17.272107Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204782:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.272136Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.272138Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204787:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.272161Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825702053204789:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.272169Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.272863Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:17.275179Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825702053204790:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-10-24T16:27:17.356520Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825702053204842:2465] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] OUTPUT: [[1u;["[\"a\",{\"v\":\"4\"},\"1\",\"2\",\"3\",\"4\",\"5\",\"6\",\"7\",\"8\",\"9\",\"10\",\"11\",\"12\"]"]]] INDEX:0/0/0 HEADER:0/0/0 |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,0,1000000,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,0,0] |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,1000000,0.5] [GOOD] >> AnalyzeColumnshard::AnalyzeSameOperationId >> KqpSchemeFulltext::CreateTableWithIndexCaseInsensitive-UseQueryClient [GOOD] >> KqpSchemeFulltext::CreateTableWithIndexInvalidSettings+UseQueryClient ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DoubleFilterReduceScopeWithPredicateVariantsWithSeparatedColumnAtFirst[1,false,1024,1000,1000000,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 24016, MsgBus: 13468 2025-10-24T16:27:12.796175Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825679453180987:2145];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:12.796269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/000f4c/r3tmp/tmpRR4bsh/pdisk_1.dat 2025-10-24T16:27:12.823769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:12.833310Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825679453180871:2081] 1761323232795513 != 1761323232795516 2025-10-24T16:27:12.835668Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24016, node 1 2025-10-24T16:27:12.840085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:12.840094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:12.840095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-10-24T16:27:12.840127Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13468 TClient is connected to server localhost:13468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:12.899759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:12.899787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:12.900813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:12.904897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, Col3 UTF8, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:27:13.006909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:13.107775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825683748148847:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.107800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.107883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825683748148857:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.107893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.136226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:27:13.144604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:27:13.144684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:27:13.144732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:27:13.144756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:27:13.144783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:27:13.144808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:27:13.144836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:27:13.144861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:27:13.144884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:27:13.144912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:27:13.144939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:27:13.144952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:27:13.144976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825683748148910:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:27:13.145560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:27:13.145576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:27:13.145589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:27:13.145598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:27:13.145617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:27:13.145625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:27:13.145636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:27:13.145645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:27:13.145653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:27:13.145657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:27:13.145664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037 ... 
888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-10-24T16:27:17.963358Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-10-24T16:27:17.963366Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:27:17.963375Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:27:17.963382Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:27:17.963393Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:27:17.963409Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:27:17.963422Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:27:17.963435Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:27:17.963445Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:27:17.965579Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825704518180482:2323];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715658;this=126812046559584;method=TTxController::StartProposeOnExecute;tx_info=281474976715658:TX_KIND_SCHEMA;min=1761323237965;max=18446744073709551615;plan=0;src=[7:7564825704518180100:2142];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.967261Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.967283Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:27:17.967286Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET 
(ACTION=UPSERT_OPTIONS, `SCAN_READER_POLICY_NAME`=`SIMPLE`) 2025-10-24T16:27:17.973753Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180554:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.973781Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.973895Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180556:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.973909Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.976640Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:27:17.978928Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: ALTER OBJECT `/Root/ColumnTable` (TYPE TABLE) SET (ACTION=ALTER_COLUMN, NAME=Col2, `DATA_ACCESSOR_CONSTRUCTOR.CLASS_NAME`=`SUB_COLUMNS`, `FORCE_SIMD_PARSING`=`false`, `COLUMNS_LIMIT`=`1024`, `SPARSED_DETECTOR_KFF`=`1000`, `MEM_LIMIT_CHUNK`=`1000000`, `OTHERS_ALLOWED_FRACTION`=`0.5`) 2025-10-24T16:27:17.983993Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180586:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.984020Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.984064Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180588:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.984069Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.987298Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:27:17.989695Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:131;event=schema_will_not_be_ignored;last_version=2;to_version=3;diff=Version: 3 UpsertColumns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 1000 ColumnsLimit: 1024 ChunkMemoryLimit: 1000000 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: false } } } } } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false ScanReaderPolicyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2, Col3) VALUES(1u, JsonDocument('{"a" : "value_a", "b" : "b1", "c" : "c1"}'), "value1"), (2u, JsonDocument('{"a" : "value_a"}'), "value1"), (3u, JsonDocument('{"a" : "value_a", "b" : "value_b"}'), "value2"), (4u, JsonDocument('{"b" : "value_b", "a" : "a4dsadasdasdasdsdasdasdas"}'), "value4") 2025-10-24T16:27:17.995215Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180619:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.995246Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.995301Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180624:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.995314Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825704518180625:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.995325Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:17.996165Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:18.003671Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825704518180628:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:27:18.091744Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825708813147975:2438] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` WHERE JSON_VALUE(Col2, "$.a") = "value_a" AND JSON_VALUE(Col2, "$.b") = "value_b" AND Col1 > 1 ORDER BY Col1; COMPARE: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] OUTPUT: [[3u;["{\"a\":\"value_a\",\"b\":\"value_b\"}"];["value2"]]] INDEX:2/0/0 HEADER:0/0/0 |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> AnalyzeColumnshard::AnalyzeMultiOperationId |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> AnalyzeDatashard::DropTableNavigateError |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00318f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit_log.0lc846gs.txt 2025-10-24T16:27:10.205943Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> KqpSchemeFulltext::CreateTableWithIndexInvalidSettings+UseQueryClient [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,0,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,0,0.5] |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate >> TraverseColumnShard::TraverseServerlessColumnTable >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> TraverseDatashard::TraverseOneTableServerless >> AnalyzeColumnshard::AnalyzeTable >> AnalyzeDatashard::AnalyzeOneTable >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scheme/unittest >> KqpSchemeFulltext::CreateTableWithIndexInvalidSettings+UseQueryClient [GOOD] Test command err: Trying to start YDB, gRPC: 27487, MsgBus: 61386 2025-10-24T16:27:12.693892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825681080315987:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:12.693968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0008af/r3tmp/tmpIL0XlQ/pdisk_1.dat 2025-10-24T16:27:12.718601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:12.728710Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27487, node 1 2025-10-24T16:27:12.736339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:12.736347Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:12.736349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:12.736375Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61386 TClient is connected to server localhost:61386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:12.799192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:12.799214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:12.800278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:12.801680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:12.810212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:12.824456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:12.838226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:12.846534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:12.963279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825681080317481:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:12.963296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:12.963333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825681080317491:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:12.963337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:12.971810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:13.001116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.007282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.019492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.026630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.033767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.040600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.047788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.054912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:13.070991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825685375285671:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.071014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.071044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825685375285676:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.071048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825685375285677:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.071053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.071511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... 5690Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:19.196801Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:19.197462Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:19.240086Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:19.252264Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:19.279338Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:19.294522Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:19.328541Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:19.579957Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825710461210285:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.579984Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.583124Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825710461210367:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.583201Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.584038Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.592345Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.600713Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.615379Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.629781Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.643778Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.657279Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.671994Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:19.687996Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[7:7564825710461211157:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.688033Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.688057Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825710461211162:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.688063Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825710461211164:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.688071Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:19.689003Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:19.698013Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825710461211166:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:27:19.751740Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825710461211218:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:20.042137Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825714756178818:2525], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:9:29: Error: At function: KiCreateTable!
:10:54: Error: Invalid tokenizer: asdf 2025-10-24T16:27:20.042260Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=NWQ0NGQ0ZDUtZmNlMDU3OTMtZmYxOWEyMWUtODBlMjExMzk=, ActorId: [7:7564825714756178811:2521], ActorState: ExecuteState, TraceId: 01k8bgqjk520vjq1fw53tdzw1z, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:27:20.047248Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825714756178833:2529], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:9:29: Error: At function: KiCreateTable!
:9:29: Error: tokenizer should be set 2025-10-24T16:27:20.047331Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=NDY2MDQ0LThjMTg4NzliLTM1M2RjMThkLWM4NWNhNmZj, ActorId: [7:7564825714756178831:2528], ActorState: ExecuteState, TraceId: 01k8bgqjkb64a6sakmpn3z3yk4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:27:20.052567Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [7:7564825714756178841:2533], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:9:29: Error: At function: KiCreateTable!
:9:29: Error: layout should be set 2025-10-24T16:27:20.052665Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=7&id=Y2I1NThkYjctMTBjNjRhODgtZDUzODY3MGQtZTAxZTUz, ActorId: [7:7564825714756178839:2532], ActorState: ExecuteState, TraceId: 01k8bgqjkg2xznxwc8syeyptd3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze >> AnalyzeDatashard::AnalyzeTwoTables >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve >> TraverseColumnShard::TraverseColumnTableRebootColumnshard |60.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,0,0.5] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,100,0] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> TraverseColumnShard::TraverseColumnTable |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,100,0] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,100,0.5] >> AnalyzeColumnshard::AnalyzeServerless >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] |60.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |60.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |60.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... 
results_accumulator.log} |60.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,100,0.5] [GOOD] |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpOlapJson::DuplicationCompactionVariants[2,true,1024,10,100,0.5] [GOOD] Test command err: Trying to start YDB, gRPC: 15390, MsgBus: 15909 2025-10-24T16:27:13.142350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825684956653895:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:13.142462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00125a/r3tmp/tmpcnv8az/pdisk_1.dat 2025-10-24T16:27:13.168171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:13.176544Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:13.176672Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825684956653786:2081] 1761323233141670 != 1761323233141673 TServer::EnableGrpc on GrpcPort 15390, node 1 2025-10-24T16:27:13.182291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:13.182300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:13.182301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:13.182338Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15909 TClient is connected to server localhost:15909 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:13.223953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... EXECUTE: CREATE TABLE `/Root/ColumnTable` ( Col1 Uint64 NOT NULL, Col2 JsonDocument, PRIMARY KEY (Col1) ) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 2); 2025-10-24T16:27:13.248359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:13.248388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:13.249414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:13.335556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:13.407525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825684956654460:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.407553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.407629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825684956654470:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.407639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:13.433117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:27:13.441107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:27:13.441144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:27:13.441168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:27:13.441184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:27:13.441200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:27:13.441222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:27:13.441241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:27:13.441258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:27:13.441279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:27:13.441295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:27:13.441311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:27:13.441327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:27:13.441345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825684956654531:2323];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:27:13.441707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:27:13.441722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:27:13.441730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:27:13.441732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:27:13.441744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:27:13.441749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:27:13.441757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:27:13.441759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:27:13.441763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:27:13.441766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:27:13.441770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process ... 
icyName: "SIMPLE" } ; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "a1"}')), (2u, JsonDocument('{"a" : "a2"}')), (3u, JsonDocument('{"b" : "b3"}')), (4u, JsonDocument('{"b" : "b4", "a" : "a4"}')) 2025-10-24T16:27:24.978872Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825733854996364:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.978913Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.979092Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825733854996369:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.979099Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825733854996370:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.979116Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.980223Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:24.983934Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825733854996373:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-10-24T16:27:25.072089Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825738149963720:2491] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:25.099557Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037888;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; 2025-10-24T16:27:25.099648Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=10;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1, Col2) VALUES(1u, JsonDocument('{"a" : "1a1"}')), (2u, JsonDocument('{"a" : "1a2"}')), (3u, JsonDocument('{"b" : "1b3"}')) 2025-10-24T16:27:25.168369Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=18;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; EXECUTE: REPLACE INTO `/Root/ColumnTable` (Col1) VALUES(2u) 2025-10-24T16:27:25.204276Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;local_tx_no=24;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037889;tx_state=complete;fline=columnshard_impl.cpp:489;event=skip_compaction;reason=disabled; WAIT_COMPACTION: 0 2025-10-24T16:27:25.371718Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WAIT_COMPACTION: 0 WAIT_COMPACTION: 0 2025-10-24T16:27:25.864172Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:27:25.864191Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:27:25.864196Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:27:25.864394Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323244926;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:27:25.864411Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323244954;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:27:25.864416Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323244989;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:27:25.864533Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[7:7564825733854996159:2324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323245017;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761323245017 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:27:25.864811Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:27:25.864815Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:27:25.864819Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=tables_manager.cpp:574;event=schema_to_remove;reason=empty;from=3; 2025-10-24T16:27:25.864929Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323244926;tx_id=281474976710658;;is_diff=0;version=1; 2025-10-24T16:27:25.864937Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323244954;tx_id=281474976710659;;is_diff=1;version=2; 2025-10-24T16:27:25.864941Z node 7 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=1761323244989;tx_id=281474976710660;;is_diff=1;version=3; 2025-10-24T16:27:25.864987Z node 7 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[7:7564825733854996153:2323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=1761323245017;tx_id=281474976710661;;new_schema=Id: 0 SinceStep: 1761323245017 SinceTxId: 281474976710661 Schema { Columns { Id: 1 Name: "Col1" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Col2" Type: "JsonDocument" TypeId: 4612 NotNull: false StorageId: "" DefaultValue { } DataAccessorConstructor { ClassName: "SUB_COLUMNS" SubColumns { Settings { SparsedDetectorKff: 10 ColumnsLimit: 1024 ChunkMemoryLimit: 100 OthersAllowedFraction: 0.5 DataExtractor { ClassName: "JSON_SCANNER" JsonScanner { FirstLevelOnly: false ForceSIMDJsonParsing: true } } } } } ColumnFamilyId: 0 } KeyColumnNames: "Col1" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 4 Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } ScanReaderPolicyName: "SIMPLE" } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; COMPACTION_HAPPENED: 0 -> 1 EXECUTE: PRAGMA OptimizeSimpleILIKE; PRAGMA AnsiLike;SELECT * FROM `/Root/ColumnTable` ORDER BY Col1; COMPARE: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] OUTPUT: [[1u;["{\"a\":\"1a1\"}"]];[2u;#];[3u;["{\"b\":\"1b3\"}"]];[4u;["{\"a\":\"a4\",\"b\":\"b4\"}"]]] INDEX:0/0/0 HEADER:0/0/0 >> TraverseDatashard::TraverseOneTableServerless [GOOD] |60.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |60.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |60.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots >> TraverseDatashard::TraverseOneTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-10-24T16:27:21.045799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.069006Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.069078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.069106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002aaf/r3tmp/tmprvAT7G/pdisk_1.dat 2025-10-24T16:27:21.162006Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.194289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.194326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.226725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14093, node 1 2025-10-24T16:27:21.304897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.304912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.304915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.305015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.307474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.332367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15868 2025-10-24T16:27:21.674469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.482287Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.485471Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.486512Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.496124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.496155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.521407Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.522125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.617682Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.617741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.633645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.705125Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.726863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727166Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727386Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727540Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727567Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727629Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727660Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.727705Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.797578Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.807215Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.807260Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.814676Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.815166Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.815205Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.815213Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.815221Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.815228Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.815235Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:22.815244Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.815614Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.815950Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:22.816379Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-10-24T16:27:22.819250Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.819267Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.819279Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:22.825090Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:22.826108Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:22.828011Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.828053Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.830239Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:22.830313Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:22.908191Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-10-24T16:27:22.933081Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.086140Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:23.087832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.089862Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.089894Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:23.201279Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:23.434580Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:23.927905Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.953665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:24.541918Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:24.557007Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-10-24T16:27:24.557030Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8270: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-10-24T16:27:24.557054Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:2581:2935], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-10-24T16:27:24.557943Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2586:2937] 2025-10-24T16:27:24.558081Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2586:2937], schemeshard id = 72075186224037899 2025-10-24T16:27:25.790727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2685:3234], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.790782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.790875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2703:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.790890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.795037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:26.012923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2989:3284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.012971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.022203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2993:3287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.022255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.022657Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2996:3290]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.022694Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:26.022739Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-10-24T16:27:26.022748Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2999:3293] 2025-10-24T16:27:26.022771Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2999:3293] 2025-10-24T16:27:26.022993Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:3000:3156] 2025-10-24T16:27:26.023075Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2999:3293], server id = [2:3000:3156], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:26.023129Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:3000:3156], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:26.023138Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:26.023194Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:26.023204Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2996:3290], StatRequests.size() = 1 2025-10-24T16:27:26.026596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:26.026682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3004:3297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.026727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.026823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3007:3300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.026858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.026892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3011:3304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.028567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:26.139312Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:26.139335Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:26.159997Z node 1 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [1:2999:3293], schemeshard count = 1 2025-10-24T16:27:26.354979Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3013:3306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:27:26.516450Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:3128:3376] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:26.519609Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3151:3392]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.519663Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:26.519670Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:3151:3392], StatRequests.size() = 1 2025-10-24T16:27:26.529155Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k8bgqrdwcbezwcze7ey9y4bt, Database: , SessionId: ydb://session/3?node_id=1&id=MmM5OWMwNTQtNzk0NWVlYWMtMTE0NDY5NjMtOTBiZGJhODg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:26.551168Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3193:3201]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.551911Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:26.551923Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:26.551990Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:26.552000Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-10-24T16:27:26.552010Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:26.555617Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:26.555715Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 1 >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2025-10-24T16:27:19.827937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.866418Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.866515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.866555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002af4/r3tmp/tmpJGZcNu/pdisk_1.dat 2025-10-24T16:27:19.977099Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:20.009958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:20.009997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:20.050038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7718, node 1 2025-10-24T16:27:20.152117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:20.152138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:20.152144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:20.152346Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.156186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.182175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9149 2025-10-24T16:27:20.554175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.419238Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.421437Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.422416Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.430681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.430716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.453671Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.454227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.546324Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.546351Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.546630Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.546733Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.546830Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.546943Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.547019Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.547034Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.547053Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.547085Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.547107Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.558744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.606743Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.610279Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.610301Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.617749Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.617933Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.617955Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.617962Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.617969Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.617976Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.617982Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:21.617988Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.618104Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.636343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1873:2609] 2025-10-24T16:27:21.637110Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-10-24T16:27:21.637672Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.637691Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1884:2618], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.640894Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2640] 2025-10-24T16:27:21.641149Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2640], schemeshard id = 72075186224037897 2025-10-24T16:27:21.641663Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.641675Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.641688Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:21.645193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.646410Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.646431Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:21.703993Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.785507Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.829151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-10-24T16:27:21.908260Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:22.023336Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:22.734445Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.761010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.452763Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.471035Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-10-24T16:27:23.471060Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8270: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-10-24T16:27:23.471077Z node 2 :STATIST ... .994097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3417:3439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.994144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.003671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3421:3442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.003722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.004199Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3424:3445]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.004267Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:26.004313Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-10-24T16:27:26.004322Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:3427:3448] 2025-10-24T16:27:26.004342Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:3427:3448] 2025-10-24T16:27:26.004528Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:3428:3324] 2025-10-24T16:27:26.004602Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:3427:3448], server id = [2:3428:3324], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:26.004674Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:3428:3324], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:26.004683Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:26.004737Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:26.004747Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:3424:3445], StatRequests.size() = 1 2025-10-24T16:27:26.008062Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:26.008235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3432:3452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.008298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.008483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3436:3456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.008514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3439:3459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.008530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.010407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:26.231300Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:26.231334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:26.284590Z node 1 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [1:3427:3448], schemeshard count = 1 2025-10-24T16:27:26.554859Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3441:3461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-10-24T16:27:26.661074Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:3563:3536] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:26.663538Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3586:3552]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.663601Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:26.663608Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:3586:3552], StatRequests.size() = 1 2025-10-24T16:27:26.672784Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715666. Ctx: { TraceId: 01k8bgqrd9213av54akyr0aks5, Database: , SessionId: ydb://session/3?node_id=1&id=NjA3YmVmYjQtZjBlOTg5ZjktNTllZTdhYy1jNmI0MDMyYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:26.712641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:26.945110Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3941:3616]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.945176Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:26.945318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:177: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-10-24T16:27:26.945330Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:26.945394Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:26.945406Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 3, ReplyToActorId = [1:3941:3616], StatRequests.size() = 1 2025-10-24T16:27:26.948682Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:26.951009Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3950:3625]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.951086Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-10-24T16:27:26.951095Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 4, ReplyToActorId = [1:3950:3625], StatRequests.size() = 1 2025-10-24T16:27:26.960928Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715668. 
Ctx: { TraceId: 01k8bgqsax1jeck12g14302yd0, Database: , SessionId: ydb://session/3?node_id=1&id=ZmJmMmM0ZTItNTNlNDVmMDEtODY2MDdmMzQtYzFmNjNiOWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:26.994183Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3994:3588]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:26.995033Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:26.995050Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:26.995097Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:26.995108Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-10-24T16:27:26.995117Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:27.014377Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:27.014527Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 1 2025-10-24T16:27:27.014637Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4018:3600]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:27.015328Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:27.015341Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:27.015437Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:27.015449Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-10-24T16:27:27.015458Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:27.024393Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-10-24T16:27:27.024483Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 2 |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution >> TraverseDatashard::TraverseTwoTablesServerless >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] |60.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |60.1%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |60.2%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest |60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-10-24T16:27:21.090084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.119519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.121360Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.121463Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.121481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ab5/r3tmp/tmpnzamOb/pdisk_1.dat 2025-10-24T16:27:21.202645Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.235527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.235561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.256805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11378, node 1 2025-10-24T16:27:21.316271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.316293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.316298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.316382Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.318480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.353241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15566 2025-10-24T16:27:21.702242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.529063Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.531117Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.532275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.541871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.541924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.577988Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.580052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.658760Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.658810Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.659156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659283Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659440Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659587Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659608Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659671Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659698Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659728Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.659756Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.682158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.761701Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.771605Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.771632Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.776475Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.776633Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.776647Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.776651Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.776655Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.776659Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.776663Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:22.776667Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.776744Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.777475Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1613:2453] 2025-10-24T16:27:22.782001Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:22.785257Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.785276Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.785286Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.788359Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.788379Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1845:2593], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.789786Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1863:2606] 2025-10-24T16:27:22.789853Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1863:2606], schemeshard id = 72075186224037897 2025-10-24T16:27:22.794168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.795841Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.795898Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:22.847544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.912494Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:22.960386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.054269Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.192329Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:23.896078Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.952405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2210:3050], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.952446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.952563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3055], DatabaseId: /Root, PoolId: default, Failed to fetch ... de 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:25.838183Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=1;to=2; 2025-10-24T16:27:25.838392Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=2020;tx_id=281474976715659;;is_diff=0;version=1; 2025-10-24T16:27:25.838765Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=3080;tx_id=281474976715660;;new_schema=Id: 0 SinceStep: 3080 SinceTxId: 281474976715660 Schema { Columns { Id: 1 Name: "Key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Value" Type: "Uint64" TypeId: 4 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Key" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 4 Version: 2 Indexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:27:25.951313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2643:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.951417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.951826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2648:3146], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.951843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.953341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:27:26.002258Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2025-10-24T16:27:26.491933Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=2;to=3; 2025-10-24T16:27:26.492055Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=3080;tx_id=281474976715660;;is_diff=0;version=2; 2025-10-24T16:27:26.492162Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=4100;tx_id=281474976715661;;new_schema=Id: 0 SinceStep: 4100 SinceTxId: 281474976715661 Schema { Columns { Id: 1 Name: "Key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Value" Type: "Uint64" TypeId: 4 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Key" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 4 Version: 3 Indexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; 2025-10-24T16:27:26.636715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2766:3193], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.636764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.639457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2771:3197], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.639503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.640465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp:331) 2025-10-24T16:27:26.660834Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 DefaultCompression { } UpsertIndexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2025-10-24T16:27:27.272315Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=tables_manager.cpp:566;event=schema_to_remove;reason=equal_to_use;from=3;to=4; 2025-10-24T16:27:27.272496Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:905;event=useless_schema_removed;address=0,plan_step=4100;tx_id=281474976715661;;is_diff=0;version=3; 2025-10-24T16:27:27.272614Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2282:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:913;event=schema_updated;address=0,plan_step=5110;tx_id=281474976715662;;new_schema=Id: 0 SinceStep: 5110 SinceTxId: 281474976715662 Schema { Columns { Id: 1 Name: "Key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "Value" Type: "Uint64" TypeId: 4 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "Key" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 5 Version: 4 Indexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Indexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ; waiting actualization: 0/0.000012s FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=35;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=36;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=34;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=54e629ca-b0f611f0-ab452b79-afaea7ba; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=26344;delta=8160; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=26344;delta=8160; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=528;delta=25816; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=528;delta=25816; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=0;delta=528; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=528; |60.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> TResourcePoolTestReboots::SimpleDropResourcePoolWithReboots [GOOD] |60.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TraverseDatashard::TraverseTwoTables |60.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-COLUMNSHARD-ARROW ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_resource_pool_reboots/unittest >> TResourcePoolTestReboots::SimpleDropResourcePoolWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] 
recipient: [1:115:2146] 2025-10-24T16:27:14.192046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:14.192064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:14.192068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:14.192073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:14.192078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:14.192081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:14.192087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:14.192101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:14.192192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:14.192228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:14.206509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:14.206534Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:14.206605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:27:14.208924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:14.208988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:14.209027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:14.210893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:14.210986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:14.211057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:14.250670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:14.252127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:14.252178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:14.252380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:14.252392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:14.252417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:14.252425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:14.252430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:14.252454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:27:14.253582Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:27:14.270822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:14.270899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:14.270959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:14.270966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:14.271006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:14.271019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:14.271754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:14.271805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:14.271888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:14.271898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:14.271903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:14.271909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:14.272316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:14.272328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:14.272334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:14.272666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:14.272674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:14.272680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:14.272687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:14.273349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:14.273698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:14.273748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000001 2025-10-24T16:27:14.273925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:14.273943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 27:30.503180Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-10-24T16:27:30.503217Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:27:30.503226Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-10-24T16:27:30.503251Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:30.503256Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [58:211:2211], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-10-24T16:27:30.503261Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [58:211:2211], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2025-10-24T16:27:30.503266Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [58:211:2211], at schemeshard: 72057594046678944, txId: 1004, path id: 6 2025-10-24T16:27:30.503323Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-10-24T16:27:30.503331Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-10-24T16:27:30.503351Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1004:0 progress is 1/1 2025-10-24T16:27:30.503356Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-10-24T16:27:30.503362Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1004:0 progress is 1/1 2025-10-24T16:27:30.503365Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-10-24T16:27:30.503371Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-10-24T16:27:30.503377Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-10-24T16:27:30.503383Z node 58 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1004:0 2025-10-24T16:27:30.503387Z 
node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1004:0 2025-10-24T16:27:30.503399Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-10-24T16:27:30.503406Z node 58 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-10-24T16:27:30.503411Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 7 2025-10-24T16:27:30.503415Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 7 2025-10-24T16:27:30.503419Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 6], 18446744073709551615 2025-10-24T16:27:30.503526Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.503538Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.503544Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-10-24T16:27:30.503549Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-10-24T16:27:30.503554Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-10-24T16:27:30.503615Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:27:30.503621Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-10-24T16:27:30.503633Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-10-24T16:27:30.503684Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.503693Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.503697Z node 58 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-10-24T16:27:30.503701Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 7 2025-10-24T16:27:30.503706Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:27:30.504042Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.504060Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.504065Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-10-24T16:27:30.504071Z node 58 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 7 2025-10-24T16:27:30.504076Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-10-24T16:27:30.504090Z node 58 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-10-24T16:27:30.504706Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.504758Z node 58 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:27:30.504901Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-10-24T16:27:30.504979Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-10-24T16:27:30.505043Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-10-24T16:27:30.505052Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-10-24T16:27:30.505138Z node 58 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-10-24T16:27:30.505164Z node 58 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-10-24T16:27:30.505169Z node 58 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [58:404:2393] TestWaitNotification: OK eventTxId 1004 2025-10-24T16:27:30.505257Z node 58 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/ResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:30.505295Z node 58 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/ResourcePool" took 52us result status StatusPathDoesNotExist 2025-10-24T16:27:30.505341Z node 58 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/ResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 5]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/.metadata/workload_manager/pools/ResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 5 LastExistedPrefixDescription { Self { Name: "pools" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |60.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |60.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> YdbProxy::DropTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead 
logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-COLUMNSHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-COLUMNSHARD-YDB_VALUE >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> TraverseDatashard::TraverseOneTable [GOOD] >> YdbProxy::DescribeTopic [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2025-10-24T16:27:27.968697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:28.001739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:28.004608Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:28.004718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:28.004735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a00/r3tmp/tmpyGuh67/pdisk_1.dat 2025-10-24T16:27:28.087970Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:28.120898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:28.120936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:28.142283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18038, node 1 2025-10-24T16:27:28.204911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:28.204935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:28.204938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:28.205019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:28.208240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:28.232651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12861 2025-10-24T16:27:28.577860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:29.552397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:29.554425Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:29.555483Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:29.565205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:29.565249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:29.601593Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:29.605056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:29.691912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:29.691968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:29.692369Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.692517Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.692680Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.692846Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.692883Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.692961Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.692982Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.693012Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.693042Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:29.710263Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:29.768818Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:29.779595Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:29.779636Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:29.788628Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:29.788968Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:29.788991Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:29.788995Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:29.789000Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:29.789004Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:29.789008Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:29.789015Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:29.789133Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:29.790205Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1613:2453] 2025-10-24T16:27:29.792218Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:29.794599Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:29.794621Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:29.794635Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:29.797821Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:29.797851Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1845:2593], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:29.800544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1863:2606] 2025-10-24T16:27:29.800648Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1863:2606], schemeshard id = 72075186224037897 2025-10-24T16:27:29.808486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:29.810533Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:29.810577Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:29.872888Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:29.921098Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:29.964806Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:30.068064Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:30.183254Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:30.835191Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:30.872045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2218:3055], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:30.872086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:30.872169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3060], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:30.872176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:30.875501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:31.074442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2511:3104], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.074513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.074692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2515:3107], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.074701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.075031Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2518:3110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:31.075067Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:31.075079Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2520:3112] 2025-10-24T16:27:31.075101Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2520:3112] 2025-10-24T16:27:31.075263Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2521:2969] 2025-10-24T16:27:31.075335Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2520:3112], server id = [2:2521:2969], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:31.075372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:2521:2969], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:31.075380Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:31.075439Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:31.075449Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2518:3110], StatRequests.size() = 1 2025-10-24T16:27:31.077783Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:31.078430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2525:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.078459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.078571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2529:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.078579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.078605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2532:3123], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.080085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:31.193056Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:31.193095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:31.285773Z node 1 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [1:2520:3112], schemeshard count = 1 2025-10-24T16:27:31.583380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2534:3125], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:27:31.696523Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:2643:3197] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:31.699557Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2666:3213]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:31.699601Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:31.699606Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:2666:3213], StatRequests.size() = 1 2025-10-24T16:27:31.708414Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgqxbv2smgh9t0tcd055jh, Database: , SessionId: ydb://session/3?node_id=1&id=NzZkN2JlNDItNjFiZmIzZTctYjFmNjk1MzYtNGUwMWVkMmM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:31.741023Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2716:3012]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:31.741528Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:31.741537Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:31.741579Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:31.741586Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:31.741592Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:31.744656Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:31.744733Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 1 >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-COLUMNSHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-COLUMNSHARD-CSV ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-10-24T16:27:31.562031Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825761417264275:2151];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:31.562064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001a0d/r3tmp/tmp84AXoA/pdisk_1.dat 2025-10-24T16:27:31.602295Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:31.610708Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:9724 TServer::EnableGrpc on GrpcPort 27231, node 1 2025-10-24T16:27:31.639166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:31.639183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:31.639185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:31.639233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:27:31.664878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:31.664903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:31.666046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:31.671135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:27:31.771681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:31.868977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:31.886645Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-10-24T16:27:31.886891Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825761417264946:2391] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-10-24T16:27:32.048637Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564825767068110668:2079];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:32.048883Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001a0d/r3tmp/tmpIKzYFU/pdisk_1.dat 2025-10-24T16:27:32.051102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:32.064883Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:29447 TServer::EnableGrpc on GrpcPort 24103, node 2 2025-10-24T16:27:32.085257Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:32.085269Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:32.085271Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:32.085318Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:32.117476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:32.150294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:32.150337Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:32.151609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected |60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |60.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/fxx3/00313d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit_log.4_77a8pw.txt 2025-10-24T16:27:26.334711Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-10-24T16:27:26.334690Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-10-24T16:27:26.304349Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-COLUMNSHARD-CSV [GOOD] >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-DATASHARD-ARROW |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-DATASHARD-ARROW [GOOD] >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-DATASHARD-CSV |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> TSchemeShardSysViewTestReboots::CreateSysView |60.3%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TReplicationTests::CreateSequential >> KqpBoolColumnShard::TestFilterEqual-SCAN_QUERY-DATASHARD-CSV [GOOD] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-YDB_VALUE ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-10-24T16:27:29.282181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:29.321766Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:29.321878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:29.321926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0029f6/r3tmp/tmpXiSuSw/pdisk_1.dat 2025-10-24T16:27:29.471079Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:29.509594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:29.509645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:29.565369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21082, node 1 2025-10-24T16:27:29.678421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:29.678446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:29.678451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:29.678636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:29.682270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:29.746141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20945 2025-10-24T16:27:30.054788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:30.790678Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:30.793495Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:30.794378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:30.802537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:30.802571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:30.825743Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:30.826455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:30.912469Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:30.912527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:30.925253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:30.993480Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:31.014879Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015074Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015218Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015322Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015338Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015363Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015406Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015424Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.015439Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:31.085356Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:31.091458Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:31.091487Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:31.098221Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:31.098615Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:31.098647Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:31.098654Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:31.098662Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:31.098669Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:31.098676Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:31.098684Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:31.098999Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:31.099262Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:31.099657Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-10-24T16:27:31.102358Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:31.102383Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:31.102396Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:31.106609Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:31.107309Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:31.109181Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:31.109217Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:31.110827Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:31.110879Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:31.181204Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:31.214595Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-10-24T16:27:31.328272Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:31.329975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:31.331186Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:31.331213Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:31.411961Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:31.568083Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:32.115673Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:32.140797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb ... ervice] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.618726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:33.768652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2979:3272], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.768717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.778050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2983:3275], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.778097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.778521Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2986:3278]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:33.778550Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:33.778580Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-10-24T16:27:33.778588Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2989:3281] 2025-10-24T16:27:33.778607Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2989:3281] 2025-10-24T16:27:33.778789Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2990:3156] 2025-10-24T16:27:33.778879Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2989:3281], server id = [2:2990:3156], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:33.778905Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:2990:3156], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:33.778913Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:33.778952Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:33.778960Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2986:3278], StatRequests.size() = 1 2025-10-24T16:27:33.782083Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:33.782227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2994:3285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.782260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.782374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2998:3289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.782386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.782428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3001:3292], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:33.784400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:33.874119Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:33.874158Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:33.894781Z node 1 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [1:2989:3281], schemeshard count = 1 2025-10-24T16:27:34.072355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3003:3294], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-10-24T16:27:34.232140Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:3116:3361] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:34.235323Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3139:3377]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:34.235388Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:34.235397Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:3139:3377], StatRequests.size() = 1 2025-10-24T16:27:34.245924Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k8bgr008bzg8j4j33r2jjcw1, Database: , SessionId: ydb://session/3?node_id=1&id=OTAwY2I2N2EtYzI1NzRiYzUtYjVjMmI3MjYtOGYxY2U2NzA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:34.276951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:34.482303Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3467:3440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:34.482376Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:34.482385Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 3, ReplyToActorId = [1:3467:3440], StatRequests.size() = 1 2025-10-24T16:27:34.487767Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3476:3449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:34.487825Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-10-24T16:27:34.487833Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 4, ReplyToActorId = [1:3476:3449], StatRequests.size() = 1 2025-10-24T16:27:34.497373Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715666. Ctx: { TraceId: 01k8bgr0peebh89ydp9rkg61jb, Database: , SessionId: ydb://session/3?node_id=1&id=ZDBiMzgzOTktODI4NDA4YWYtMTFhM2NjOWMtZGJhYzJiNzQ=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:27:34.547568Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3521:3402]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:34.548426Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:34.548442Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:34.548500Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:34.548510Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-10-24T16:27:34.548520Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:34.552676Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:34.552794Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 1 2025-10-24T16:27:34.552887Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3545:3414]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:34.553442Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:34.553452Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:34.553489Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:34.553499Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-10-24T16:27:34.553508Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:34.554045Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-10-24T16:27:34.554088Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 2 >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> TraverseDatashard::TraverseTwoTables [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate >> TExternalDataSourceTest::CreateExternalDataSource >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-YDB_VALUE [GOOD] >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-CSV >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> 
TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-10-24T16:27:31.349740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:31.377341Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:31.377446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:31.377488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0029e3/r3tmp/tmp2Gu6N7/pdisk_1.dat 2025-10-24T16:27:31.475269Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:31.508149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:31.508197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:31.541155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9144, node 1 2025-10-24T16:27:31.621822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:31.621843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:31.621848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:31.622009Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:31.625708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:31.672960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6488 2025-10-24T16:27:31.970785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:32.757053Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:32.758786Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:32.766906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:32.766940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:32.813162Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:32.813986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:32.892866Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:32.892912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:32.893296Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893450Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893777Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893801Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893900Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893928Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.893979Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:32.905892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:32.962172Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:32.969239Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:32.969266Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:32.977022Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:32.977217Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:32.977244Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:32.977250Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:32.977257Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:32.977264Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:32.977270Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:32.977278Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:32.977463Z node 2 
:STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:32.977812Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1611:2452] 2025-10-24T16:27:32.979236Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:32.981000Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:32.981015Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:32.981029Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:32.984193Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:32.984217Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1846:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:32.986101Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1864:2607] 2025-10-24T16:27:32.986354Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1864:2607], schemeshard id = 72075186224037897 2025-10-24T16:27:32.991370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:32.993329Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:32.993362Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:33.066373Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:33.119265Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:33.194747Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:33.265494Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:33.411528Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:34.063260Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:34.106288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2220:3058], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.106336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.106414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.106419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.110113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:34.360153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2522:3108], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.360231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.360478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2526:3111], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.360490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.360917Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2529:3114]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:34.360957Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:34.360974Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2531:3116] 2025-10-24T16:27:34.360996Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2531:3116] 2025-10-24T16:27:34.361187Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2532:2977] 2025-10-24T16:27:34.361270Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2531:3116], server id = [2:2532:2977], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:34.361318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:2532:2977], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:34.361328Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:34.361386Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:34.361398Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2529:3114], StatRequests.size() = 1 2025-10-24T16:27:34.364964Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:34.365156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2536:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.365177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.365272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2540:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.365282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.365302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2543:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:34.366893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:34.466446Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:34.466478Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:34.559449Z node 1 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [1:2531:3116], schemeshard count = 1 2025-10-24T16:27:34.883958Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2545:3129], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:27:34.999947Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:2650:3197] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:35.002750Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2673:3213]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:35.002807Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:35.002815Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:2673:3213], StatRequests.size() = 1 2025-10-24T16:27:35.011186Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgr0jf8yvwtcr3fyxze754, Database: , SessionId: ydb://session/3?node_id=1&id=MmQzZjYxODEtNjE4NmFjODEtNjY3OGUwNC0yZThmNTQzYw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:35.042574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:35.253518Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3017:3276]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:35.253598Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:35.253609Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 3, ReplyToActorId = [1:3017:3276], StatRequests.size() = 1 2025-10-24T16:27:35.258352Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3026:3285]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:35.258414Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-10-24T16:27:35.258424Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 4, ReplyToActorId = [1:3026:3285], StatRequests.size() = 1 2025-10-24T16:27:35.266816Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k8bgr1ejaxyzt21g3yd1xjf6, Database: , SessionId: ydb://session/3?node_id=1&id=Yzc1MGMzNTItYjQyZDEzZDEtZjc5Y2JiLWZlMzcyZjdk, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:27:35.329357Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3076:3237]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:35.330425Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:35.330441Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:35.330507Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:35.330517Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:35.330526Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:35.334141Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:35.334247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 1 2025-10-24T16:27:35.334327Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3100:3249]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:35.334850Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:35.334860Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:35.334910Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:35.334919Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:35.334927Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:35.335396Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-10-24T16:27:35.335438Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 2 >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:35.964236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:35.964260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.964266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:35.964275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:35.964282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:35.964287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:35.964297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.964310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:35.964444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:35.964519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:35.987046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:35.987081Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:35.987197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:35.990629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:35.990668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:35.990728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:35.992467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:35.992614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:35.992713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:35.992769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:35.993744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-10-24T16:27:35.993804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:35.994075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:35.994089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:35.994117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:35.994126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:35.994132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:35.994185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.995598Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:36.014737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:36.014793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.014836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:36.014842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:36.014907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:36.014916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:36.015444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:36.015480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:36.015521Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.015531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:36.015537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:36.015542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:36.015907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.015919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:36.015924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:36.016267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.016275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.016280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:36.016284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:36.016789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:36.017146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:36.017185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:36.017374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:36.017392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:36.017397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:36.017459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:36.017467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:36.017493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:36.017504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:36.017871Z node 1 :FLAT_TX_SCHEMESHARD ... 4046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-10-24T16:27:36.036027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:36.036055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:36.036064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-10-24T16:27:36.036096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:36.036109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-10-24T16:27:36.036143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:36.036151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:27:36.036353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:27:36.036411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-10-24T16:27:36.036749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:36.036759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:36.036781Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:27:36.036800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:36.036805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-10-24T16:27:36.036810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-10-24T16:27:36.036820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.036826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-10-24T16:27:36.036839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:27:36.036843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:27:36.036849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-10-24T16:27:36.036852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:27:36.036857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-10-24T16:27:36.036862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-10-24T16:27:36.036867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-10-24T16:27:36.036871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-10-24T16:27:36.036883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:36.036888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-10-24T16:27:36.036893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-10-24T16:27:36.036897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-10-24T16:27:36.037007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:27:36.037022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:27:36.037027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:27:36.037032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:27:36.037037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:27:36.037071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:27:36.037076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:27:36.037084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:27:36.037155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:27:36.037165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-10-24T16:27:36.037169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-10-24T16:27:36.037174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-10-24T16:27:36.037178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:36.037186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-10-24T16:27:36.037989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-10-24T16:27:36.038014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:27:36.038025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-10-24T16:27:36.038081Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-10-24T16:27:36.038089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-10-24T16:27:36.038166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-10-24T16:27:36.038180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:27:36.038184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:395:2384] TestWaitNotification: OK eventTxId 104 2025-10-24T16:27:36.038264Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:36.038294Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 39us result status StatusPathDoesNotExist 2025-10-24T16:27:36.038328Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:35.947801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:35.947828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.947834Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:35.947841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:35.947866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:35.947872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:35.947882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.947900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:35.948036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:35.948108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:35.968864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:35.968900Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:35.969003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:35.972509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:35.972544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:35.972596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:35.974270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:35.974486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:35.974620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:35.974717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:35.975987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:35.976040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:35.976333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:35.976345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-10-24T16:27:35.976369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:35.976379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:35.976386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:35.976442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.978142Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:36.002939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:36.003039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.003115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:36.003127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:36.003222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:36.003241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:36.004090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:36.004150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:36.004215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.004229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:36.004235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts 
opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:36.004244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:36.004716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.004731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:36.004737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:36.005120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.005132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:36.005139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:36.005146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:36.005846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:36.006324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:36.006387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:36.006620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:36.006649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:36.006657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:36.006737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:36.006745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:36.006781Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:36.006794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:36.007235Z node 1 :FLAT_TX_SCHEMESHARD ... 678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:27:36.293967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:27:36.294121Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:27:36.294137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:27:36.294157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:27:36.294167Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-10-24T16:27:36.294172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:36.294183Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:27:36.295048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:27:36.295267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:27:36.295330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:27:36.295339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:27:36.295412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:27:36.295430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:27:36.295436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:308:2298] TestWaitNotification: OK eventTxId 101 2025-10-24T16:27:36.295511Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:36.295554Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 56us result status StatusSuccess 2025-10-24T16:27:36.295675Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-10-24T16:27:36.296732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:36.296793Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-10-24T16:27:36.296808Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-10-24T16:27:36.296840Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102, at schemeshard: 72057594046678944 2025-10-24T16:27:36.300680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-10-24T16:27:36.300757Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:27:36.300872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:27:36.300881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:27:36.300972Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:27:36.300998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:27:36.301004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:316:2306] TestWaitNotification: OK eventTxId 102 2025-10-24T16:27:36.301087Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:36.301135Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 58us result status StatusSuccess 2025-10-24T16:27:36.301251Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" 
PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable |60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |60.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-CSV [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> TRtmrTest::CreateWithoutTimeCastBuckets |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/olap/unittest >> KqpBoolColumnShard::TestFilterEqual-EXECUTE_QUERY-DATASHARD-CSV [GOOD] Test command err: Trying to start YDB, gRPC: 10978, MsgBus: 13923 2025-10-24T16:27:31.097463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825760856327452:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:31.097478Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001259/r3tmp/tmpyy7Kjq/pdisk_1.dat 2025-10-24T16:27:31.142472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:31.142471Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:31.142738Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825760856327428:2081] 1761323251097366 != 1761323251097369 TServer::EnableGrpc on GrpcPort 10978, node 1 2025-10-24T16:27:31.204792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:31.204828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:31.205808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:31.209150Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:31.209158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:31.209161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:31.209202Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13923 TClient is connected to server localhost:13923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:31.267777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:27:31.301156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions CREATE TABLE `/Root/Table1` (id Int32 NOT NULL, int Int64, b Bool, PRIMARY KEY (id)) PARTITION BY HASH(id) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1); 2025-10-24T16:27:31.496366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825760856328109:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.496396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.496449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825760856328119:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.496458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.534089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:27:31.542469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:27:31.542527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:27:31.542583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:27:31.542603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:27:31.542616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:27:31.542629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:27:31.542648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:27:31.542661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:27:31.542674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:27:31.542689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:27:31.542707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:27:31.542721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:27:31.542738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825760856328172:2326];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:27:31.543165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:27:31.543180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:27:31.543190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:27:31.543192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:27:31.543207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-10-24T16:27:31.543209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-10-24T16:27:31.543217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-10-24T16:27:31.543220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-10-24T16:27:31.543225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-10-24T16:27:31.543227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-10-24T16:27:31.543231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execu ... t, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:35.729775Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:35.742210Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825779624534542:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:35.742243Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825779624534547:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:35.742249Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:35.742314Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564825779624534550:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:35.742330Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:35.742929Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:35.748739Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7564825779624534549:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:27:35.811364Z node 6 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [6:7564825779624534602:2401] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Trying to start YDB, gRPC: 21625, MsgBus: 3829 2025-10-24T16:27:36.143247Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7564825782987238992:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:36.143604Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001259/r3tmp/tmpLQPKYx/pdisk_1.dat 2025-10-24T16:27:36.153508Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:36.165177Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:36.165210Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:36.166184Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:36.167497Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:7564825782987238965:2081] 1761323256142789 != 1761323256142792 2025-10-24T16:27:36.169710Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21625, node 7 2025-10-24T16:27:36.178111Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:36.178127Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:36.178131Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:36.178194Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3829 TClient is connected to server localhost:3829 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:36.262239Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:36.264319Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:27:36.367013Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:36.544943Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825782987239642:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.544970Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.545039Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825782987239651:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.545050Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.550330Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:36.568510Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825782987239752:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.568534Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.568563Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825782987239757:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.568607Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564825782987239759:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.568624Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:36.569476Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:36.571410Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564825782987239761:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:27:36.663242Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564825782987239812:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |60.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] |60.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |60.4%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> TSequence::CreateSequenceParallel >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TSequence::CreateSequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:27:37.557586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:37.557611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:37.557615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:37.557623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:37.557629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:37.557632Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:37.557640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:37.557652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:37.557750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:37.557816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:37.567978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:37.568009Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:37.570756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:37.570806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:37.570864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:37.572201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:37.572419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:37.572519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:37.572611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:37.573425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:37.573476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:37.573700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:37.573707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:37.573730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:37.573735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:37.573740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:37.573763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.574911Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:27:37.589798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:37.589900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.589969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:37.589977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:37.590056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:37.590067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:37.590908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:37.590958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:37.591017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.591025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:37.591029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:37.591034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:37.591427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.591445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:37.591455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:37.591864Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.591875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.591881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:37.591887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:37.592401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:37.592785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:37.592840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:37.593034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:37.593053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:37.593059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:37.593113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:37.593121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:37.593150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:37.593160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:37.593529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:37.593537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:37.598213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-10-24T16:27:37.598235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-10-24T16:27:37.598288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:37.598301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:37.598305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_rtmr.cpp:130: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-10-24T16:27:37.598318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 100:0 128 -> 240 2025-10-24T16:27:37.598340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:37.598349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:37.598624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:37.598631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:37.598655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:27:37.598667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:37.598670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-10-24T16:27:37.598673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-10-24T16:27:37.598733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-10-24T16:27:37.598738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-10-24T16:27:37.598747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:27:37.598751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:27:37.598754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#100:0 progress is 1/1 2025-10-24T16:27:37.598756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:27:37.598759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-10-24T16:27:37.598763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-10-24T16:27:37.598766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 100:0 2025-10-24T16:27:37.598769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 100:0 2025-10-24T16:27:37.598776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:27:37.598781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-10-24T16:27:37.598784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:27:37.598786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-10-24T16:27:37.598846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:27:37.598856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:27:37.598859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:27:37.598862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-10-24T16:27:37.598865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:27:37.598902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:27:37.598907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-10-24T16:27:37.598909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-10-24T16:27:37.598911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-10-24T16:27:37.598914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:37.598919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-10-24T16:27:37.599419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-10-24T16:27:37.599436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-10-24T16:27:37.599471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-10-24T16:27:37.599477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-10-24T16:27:37.599521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-10-24T16:27:37.599536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-10-24T16:27:37.599539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:314:2303] TestWaitNotification: OK eventTxId 100 2025-10-24T16:27:37.599595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:37.599627Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 37us result status StatusSuccess 2025-10-24T16:27:37.599694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:27:35.393486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:35.393509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.393514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:35.393522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:35.393528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:35.393532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:35.393542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.393556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:35.393670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:35.393727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:35.407964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:35.407996Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:35.411355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:35.411391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:35.411447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:35.413119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:35.413327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:35.413428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:35.413516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:35.414474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:35.414530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:35.414760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:35.414769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:35.414803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:35.414810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:35.414816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:35.414847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.416289Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:27:35.436572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:35.436667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.436741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:35.436749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:35.436819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:35.436832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:35.437896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:35.437960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:35.438040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.438052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:35.438059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:35.438065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:35.438564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:27:35.438576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:35.438584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:35.438994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.439005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.439010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:35.439018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:35.439644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:35.440095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:35.440172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:35.440395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:35.440419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:35.440426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:35.440487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:35.440493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:35.440525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:35.440538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:35.440959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:35.440967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ration is done id#102:0 progress is 1/1 2025-10-24T16:27:38.149760Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:27:38.149765Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-10-24T16:27:38.149775Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:27:38.149781Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:27:38.149786Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:27:38.149817Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:38.149825Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-10-24T16:27:38.149830Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:27:38.149834Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-10-24T16:27:38.150026Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [10:215:2215], Recipient [10:138:2159]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-10-24T16:27:38.150037Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-10-24T16:27:38.150052Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:38.150065Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:38.150071Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:27:38.150077Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:27:38.150082Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:27:38.150098Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:38.150259Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [10:215:2215], Recipient [10:138:2159]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-10-24T16:27:38.150268Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-10-24T16:27:38.150279Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:38.150311Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:38.150321Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:27:38.150327Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-10-24T16:27:38.150332Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:27:38.150349Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:27:38.150354Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:38.150627Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435084, Sender [10:138:2159], Recipient [10:138:2159]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-10-24T16:27:38.150639Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5419: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-10-24T16:27:38.150649Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:27:38.150655Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-10-24T16:27:38.150670Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:38.151137Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:38.151375Z node 10 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:38.151385Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:38.151679Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:38.151690Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:38.151729Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:27:38.151794Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:27:38.151803Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:27:38.151896Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [10:454:2418], Recipient [10:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:38.151905Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:38.151911Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046678944 2025-10-24T16:27:38.151938Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124996, Sender [10:403:2367], Recipient [10:138:2159]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-10-24T16:27:38.151944Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5243: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-10-24T16:27:38.151956Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:27:38.151979Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:27:38.151985Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:452:2416] 2025-10-24T16:27:38.152015Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [10:454:2418], Recipient [10:138:2159]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:38.152021Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:38.152025Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-10-24T16:27:38.152089Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [10:455:2419], Recipient [10:138:2159]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:27:38.152095Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:27:38.152108Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:38.152149Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 37us result status StatusPathDoesNotExist 2025-10-24T16:27:38.152189Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |60.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |60.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence |60.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TExternalDataSourceTest::DropTableTwice >> TExternalDataSourceTest::SchemeErrors >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:39.189449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:39.189481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.189487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:39.189497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:39.189505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:39.189509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:39.189518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.189536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:39.189679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.189751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:39.210130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:39.210185Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:39.210314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.213534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:39.213580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:39.213661Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:39.215534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:39.215713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:39.215840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.215948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:39.217454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.217501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:39.217715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:39.217723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.217742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:39.217748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:39.217753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:39.217799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.218984Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:39.237565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.237673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.237755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:39.237764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:39.237872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:39.237889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:39.238928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.238983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:39.239046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.239058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:39.239064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:39.239070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:39.239487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.239499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:39.239508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:39.239884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.239894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.239900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.239908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.240552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:39.241146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:39.241229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:39.241517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.241553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.241563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.241657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:39.241667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.241707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:39.241723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:39.242302Z node 1 :FLAT_TX_SCHEMESHARD ... 
MESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.254312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:35: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-10-24T16:27:39.254338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-10-24T16:27:39.254365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:39.254375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:39.254457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:39.254748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:27:39.255046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:39.255053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:39.255090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:27:39.255104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:27:39.255116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.255122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-10-24T16:27:39.255127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:27:39.255131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:214:2214], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:27:39.255187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.255195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 
2025-10-24T16:27:39.255209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:27:39.255213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:27:39.255219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:27:39.255222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:27:39.255226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-10-24T16:27:39.255232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:27:39.255237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:27:39.255241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:27:39.255255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:27:39.255262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-10-24T16:27:39.255266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-10-24T16:27:39.255270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:27:39.255395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:39.255406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:39.255411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:27:39.255416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:27:39.255419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:27:39.255727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:39.255741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:27:39.255746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:27:39.255751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:27:39.255755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:39.255766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-10-24T16:27:39.256215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:39.256521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:27:39.256596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:27:39.256605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-10-24T16:27:39.256689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:27:39.256709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:27:39.256714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:340:2329] TestWaitNotification: OK eventTxId 102 2025-10-24T16:27:39.256795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:39.256833Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 50us result status StatusSuccess 2025-10-24T16:27:39.256935Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |60.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TExternalDataSourceTest::ReadOnlyMode >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> KqpSysColV1::InnerJoinTables >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] |60.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |60.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> KqpSysColV0::InnerJoinSelect >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:39.873356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:39.873375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.873379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:39.873384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:39.873388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:39.873391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:39.873397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.873408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:39.873492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.873535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:39.889634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:39.889658Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:39.889732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.891998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:39.892022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:39.892062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:39.893675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:39.893849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:39.893947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.894008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:39.894823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.894867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:39.895057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:39.895066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.895082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:39.895087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:39.895092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:39.895129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.896337Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:39.914163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.914235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.914299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:39.914306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: 
TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:39.914372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:39.914384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:39.915073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.915105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:39.915147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.915154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:39.915159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:39.915163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:39.915438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.915447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:39.915451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:39.915664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.915670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.915674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.915679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.916134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:39.916459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:39.916502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:39.916652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.916671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.916677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.916735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:39.916741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.916763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:39.916773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:39.917058Z node 1 :FLAT_TX_SCHEMESHARD ... 
ist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-10-24T16:27:39.923496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-10-24T16:27:39.923516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-10-24T16:27:39.923817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.923839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-10-24T16:27:39.924469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.924515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-10-24T16:27:39.924528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-10-24T16:27:39.924557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-10-24T16:27:39.924983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.925016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-10-24T16:27:39.925684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.925728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-10-24T16:27:39.925738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-10-24T16:27:39.925757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-10-24T16:27:39.926129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.926159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-10-24T16:27:39.926715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.926745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-10-24T16:27:39.926757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-10-24T16:27:39.926771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102, at schemeshard: 72057594046678944 2025-10-24T16:27:39.927132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.927162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:102, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:39.890406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:39.890428Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.890434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:39.890440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:39.890446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:39.890451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:39.890460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.890472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:39.890577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.890634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:39.913439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:39.913472Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:39.913571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.916479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:39.916552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:39.916605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:39.919260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:39.919339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:39.919444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.919619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:39.920271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.920308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:39.920530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:39.920540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.920560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:39.920567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:39.920574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:39.920608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.921854Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:27:39.943519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.943597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.943655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:39.943663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:39.943748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:39.943764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:39.944601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.944645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:39.944692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.944703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:39.944710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:39.944716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:39.945155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.945166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:39.945172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:39.945556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.945567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.945574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.945581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.946223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:39.946602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:39.946654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:39.946857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.946881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.946888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.946949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:39.946957Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.946983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:39.946996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:39.947501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:39.947511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:39.947554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.947560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:27:39.947615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.947623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:27:39.947636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:27:39.947642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.947647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:27:39.947650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.947654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-10-24T16:27:39.947659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.947664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:27:39.947668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:27:39.947680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:27:39.947688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:27:39.947707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 
1], 3 2025-10-24T16:27:39.948083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:27:39.948103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:27:39.948109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:27:39.948115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:27:39.948121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:39.948136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:27:39.948978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:27:39.949099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-10-24T16:27:39.949327Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:276:2265] Bootstrap 2025-10-24T16:27:39.949547Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:276:2265] Become StateWork (SchemeCache [1:281:2270]) 2025-10-24T16:27:39.950220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.950285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-10-24T16:27:39.950298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-10-24T16:27:39.950304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag 
EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-10-24T16:27:39.950464Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:39.951426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.951496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-10-24T16:27:39.951583Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:27:39.951640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:27:39.951647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:27:39.951717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:27:39.951737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:27:39.951740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:291:2280] TestWaitNotification: OK eventTxId 101 2025-10-24T16:27:39.951804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:39.951831Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 35us result status StatusPathDoesNotExist 2025-10-24T16:27:39.951892Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] |60.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |60.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:39.811021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:39.811040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.811045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:39.811050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:39.811065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:39.811068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:39.811075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:39.811084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:39.811173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.811217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:39.829557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:39.829591Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:39.829689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:39.833100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2025-10-24T16:27:39.833136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:39.833206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:39.834651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:39.834884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:39.835020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.835110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:39.836333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.836373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:39.836599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:39.836608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:39.836626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:39.836635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:39.836641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:39.836683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.837962Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:39.855601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:39.855658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.855716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:39.855722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, 
LocalPathId: 1] source path: 2025-10-24T16:27:39.855791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:39.855800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:39.856413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.856442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:39.856477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.856484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:39.856487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:39.856491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:39.856750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.856758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:39.856763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:39.857009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.857018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:39.857023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.857029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:39.857462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:39.857788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to 
tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:39.857818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:39.857955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:39.857971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:39.857986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.858031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:39.858037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:39.858062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:39.858073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:39.858408Z node 1 :FLAT_TX_SCHEMESHARD ... 
44 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113611Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113621Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 11us result status StatusSuccess 2025-10-24T16:27:40.113650Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 
MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113705Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113713Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 9us result status StatusSuccess 2025-10-24T16:27:40.113754Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113795Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-10-24T16:27:40.113804Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 10us result status StatusSuccess 2025-10-24T16:27:40.113829Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113856Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.113863Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 8us result status StatusSuccess 2025-10-24T16:27:40.113887Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |60.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:27:37.990680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:37.990714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:37.990721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:37.990730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:37.990738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:37.990743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:37.990756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:37.990772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:37.990908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:37.990986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:38.009096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:38.009129Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:38.017992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:38.018040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:38.018098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:38.020378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:38.020975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:38.021136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:38.021257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:38.022428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:38.022476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:38.022778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:38.022788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:38.022831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:38.022841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:38.022847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:38.022886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.034762Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:27:38.119349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:38.119453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.119530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:38.119540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:38.119631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:38.119649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:38.122896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:38.122966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:38.123049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.123062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:38.123070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:38.123078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:38.123686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.123716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:38.123723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:38.124186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.124200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.124208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:38.124218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:38.125144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:38.126133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:38.126212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:38.126473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:38.126515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:38.126534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:38.126623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:38.126631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:38.126677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:38.126694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:38.127335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:38.127350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
son remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-10-24T16:27:40.271814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-10-24T16:27:40.271817Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:40.272303Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.272312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2025-10-24T16:27:40.272325Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:347:2324] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-10-24T16:27:40.272347Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [7:130:2155], Recipient [7:130:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-10-24T16:27:40.272352Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-10-24T16:27:40.272359Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.272364Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:40.272411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-10-24T16:27:40.272432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:40.272436Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-10-24T16:27:40.272440Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-10-24T16:27:40.272445Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-10-24T16:27:40.272448Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-10-24T16:27:40.272452Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-10-24T16:27:40.272461Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:420:2377] message: TxId: 102 2025-10-24T16:27:40.272467Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-10-24T16:27:40.272474Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:27:40.272483Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:27:40.272503Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:27:40.272509Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-10-24T16:27:40.272512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:1 2025-10-24T16:27:40.272517Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:27:40.272521Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:2 2025-10-24T16:27:40.272524Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:2 2025-10-24T16:27:40.272531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:27:40.272535Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:3 2025-10-24T16:27:40.272538Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:3 2025-10-24T16:27:40.272545Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-10-24T16:27:40.272638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:40.272643Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.272660Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435084, Sender [7:130:2155], Recipient [7:130:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-10-24T16:27:40.272664Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5419: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-10-24T16:27:40.272671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-10-24T16:27:40.272676Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-10-24T16:27:40.272686Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:27:40.272703Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:40.272706Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.272725Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:40.272729Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.272734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:40.272740Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.272745Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:40.272748Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.273144Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.273193Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:27:40.273197Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.273412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.273428Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:420:2377] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-10-24T16:27:40.273461Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.273468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:527:2476] 2025-10-24T16:27:40.273486Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-10-24T16:27:40.273518Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [7:529:2478], Recipient [7:130:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:40.273523Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:40.273526Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-10-24T16:27:40.273602Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [7:604:2553], Recipient [7:130:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:27:40.273606Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:27:40.273616Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.273647Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 26us result status StatusPathDoesNotExist 2025-10-24T16:27:40.273676Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::PartitionStatsRange2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:40.125114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:40.125133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:40.125137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:40.125143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:40.125148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:40.125151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:40.125156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:40.125169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-10-24T16:27:40.125254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:40.125303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:40.142872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:40.142901Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:40.142975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:40.145581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:40.145608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:40.145653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:40.146738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:40.146881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:40.146956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.147014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:40.147802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:40.147830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:40.148061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:40.148074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:40.148093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:40.148100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:40.148107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:40.148148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.149470Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:40.171564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:40.171654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.171732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:40.171741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:40.171841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:40.171871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:40.172830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.172872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:40.172926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.172939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:40.172945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:40.172951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:40.173451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.173472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:40.173478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:40.173855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.173865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.173869Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:40.173874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:40.174422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:40.174912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:40.174972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:40.175194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.175222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.175230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:40.175301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:40.175309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:40.175342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:40.175354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:40.175818Z node 1 :FLAT_TX_SCHEMESHARD ... 
chemeshard: 72057594046678944 2025-10-24T16:27:40.429841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.429845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.429857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-10-24T16:27:40.429876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-10-24T16:27:40.429891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.429895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.429905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.429909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.429919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-10-24T16:27:40.429934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2025-10-24T16:27:40.429955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2025-10-24T16:27:40.429967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.429971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.429983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.429987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 113, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430063Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 115, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2025-10-24T16:27:40.430230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got 
EvNotifyTxCompletionResult 2025-10-24T16:27:40.430314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:398:2388] 2025-10-24T16:27:40.430382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.430390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:398:2388] TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 2025-10-24T16:27:40.430988Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.431049Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 81us result status StatusSuccess 2025-10-24T16:27:40.431161Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:27:40.017829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:40.017859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:40.017865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:40.017873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:40.017881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:40.017886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:40.017896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:40.017913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:40.018050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:40.018124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:40.039119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:40.039164Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:40.039276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:40.043065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:40.043109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:40.043180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:40.044967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:40.045149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:40.045282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.045368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:40.046270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:40.046307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:40.046527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:40.046535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:40.046551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:40.046557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:40.046562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:40.046603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.047825Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:27:40.069314Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:40.069401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.069466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:40.069473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:40.069545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:27:40.069558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:40.070324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.070364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:40.070412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.070420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:40.070425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:40.070429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:40.070742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.070751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:40.070755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:40.071034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.071045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:27:40.071051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:40.071058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:40.071648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:40.072065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:40.072122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:40.072293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.072319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.072327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:40.072388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:40.072393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:40.072421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:40.072430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:40.072911Z node 1 :FLAT_TX_SCHEMESHARD ... 
: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:27:40.511426Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:27:40.511440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:27:40.511444Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:27:40.511448Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:27:40.511453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-10-24T16:27:40.511957Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:27:40.511977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-10-24T16:27:40.511983Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-10-24T16:27:40.511988Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-10-24T16:27:40.511994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:27:40.512011Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-10-24T16:27:40.512574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:27:40.512885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-10-24T16:27:40.513088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-10-24T16:27:40.513146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:27:40.513154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:27:40.513232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:27:40.513250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.513255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:340:2330] TestWaitNotification: OK eventTxId 101 2025-10-24T16:27:40.513333Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.513374Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 52us result status StatusSuccess 2025-10-24T16:27:40.513488Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-10-24T16:27:40.514510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-10-24T16:27:40.514553Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-10-24T16:27:40.514569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-10-24T16:27:40.520305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.520392Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:27:40.520506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:27:40.520514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:27:40.520611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:27:40.520636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.520641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:348:2338] TestWaitNotification: OK eventTxId 103 2025-10-24T16:27:40.520720Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:40.520767Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 57us result status StatusSuccess 2025-10-24T16:27:40.520882Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSequence::AlterTableSetDefaultFromSequence [GOOD] |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> KqpSysColV1::InnerJoinTables [GOOD] >> KqpSysColV0::InnerJoinSelect [GOOD] >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] >> StatisticsSaveLoad::ForbidAccess |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 15977, MsgBus: 8011 2025-10-24T16:27:40.109207Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825799584969793:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:40.109230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002e1c/r3tmp/tmpx8JaZJ/pdisk_1.dat 2025-10-24T16:27:40.141000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:40.148723Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:40.148911Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825799584969766:2081] 1761323260108930 != 1761323260108933 TServer::EnableGrpc on GrpcPort 15977, node 1 2025-10-24T16:27:40.156336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:40.156356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:40.156358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:27:40.156410Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8011 TClient is connected to server localhost:8011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:40.217426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:40.217451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:40.218409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:40.218511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-10-24T16:27:40.223241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.241349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.260124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:40.270953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.297881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:40.475298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825799584971413:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.475325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.475431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825799584971423:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.475445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.539455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.546622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.558584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.573889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.586630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.600494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.614372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.628931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.644387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825799584972284:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.644399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825799584972289:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.644415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.644463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825799584972292:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.644479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.645147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:40.648228Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825799584972291:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:27:40.726476Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825799584972345:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:27:38.095782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:38.095812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:38.095818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:38.095829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:38.095836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:38.095841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:38.095866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:38.095889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:38.096027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:38.096105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:38.113061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:38.113090Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:38.116714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:38.116753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:38.116829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:38.118075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:38.118220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:38.118337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:38.118429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:38.119174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:38.119213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:38.119454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:38.119466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:38.119494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:38.119503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:38.119510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:38.119540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.121240Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:27:38.149635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:38.149703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.149755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:27:38.149762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:27:38.149830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-10-24T16:27:38.149841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:38.150600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:38.150670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:27:38.150729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.150739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:27:38.150744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:27:38.150749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:27:38.151230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.151250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:27:38.151255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:27:38.151607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.151618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:27:38.151625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:38.151633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:27:38.152366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:27:38.152963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:27:38.153037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:27:38.153283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:38.153311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:38.153327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:38.153397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:27:38.153405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:27:38.153434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:27:38.153445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:27:38.154914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:38.154922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
3409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 295 } } CommitVersion { Step: 5000014 TxId: 114 } 2025-10-24T16:27:40.838791Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:40.838925Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [7:1054:2990], Recipient [7:130:2155]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:40.838931Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:40.838934Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046678944 2025-10-24T16:27:40.838957Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269551620, Sender [7:996:2940], Recipient [7:130:2155]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 996 RawX2: 30064774012 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-10-24T16:27:40.838960Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5262: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-10-24T16:27:40.838966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 996 RawX2: 30064774012 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-10-24T16:27:40.838969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-10-24T16:27:40.838978Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 996 RawX2: 30064774012 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-10-24T16:27:40.838985Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:27:40.838991Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 996 RawX2: 30064774012 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-10-24T16:27:40.839003Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:40.839006Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.839009Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-10-24T16:27:40.839014Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 114:0 129 -> 240 2025-10-24T16:27:40.839032Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:40.839499Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.839522Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-10-24T16:27:40.839526Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.839834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-10-24T16:27:40.839841Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.839883Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.839887Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.839904Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.839907Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.839910Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 114:0 2025-10-24T16:27:40.839924Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:996:2940] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-10-24T16:27:40.839970Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [7:130:2155], Recipient [7:130:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-10-24T16:27:40.839976Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-10-24T16:27:40.839982Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.839987Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 114:0 ProgressState 2025-10-24T16:27:40.839997Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:40.840000Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#114:0 progress is 1/1 2025-10-24T16:27:40.840004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-10-24T16:27:40.840008Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#114:0 progress is 1/1 
2025-10-24T16:27:40.840010Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-10-24T16:27:40.840013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-10-24T16:27:40.840021Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:396:2363] message: TxId: 114 2025-10-24T16:27:40.840026Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-10-24T16:27:40.840031Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 114:0 2025-10-24T16:27:40.840035Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 114:0 2025-10-24T16:27:40.840056Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-10-24T16:27:40.840490Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:27:40.840516Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:396:2363] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-10-24T16:27:40.840572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-10-24T16:27:40.840578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1022:2958] 2025-10-24T16:27:40.840616Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [7:1024:2960], Recipient [7:130:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:40.840622Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:40.840627Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-10-24T16:27:40.840869Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122432, Sender [7:1063:2999], Recipient [7:130:2155]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-10-24T16:27:40.840875Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5241: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-10-24T16:27:40.841618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:27:40.841669Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:523: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2025-10-24T16:27:40.841748Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-10-24T16:27:40.841813Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:27:40.842312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:40.842357Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-10-24T16:27:40.842362Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 >> KqpSystemView::PartitionStatsRange2 [GOOD] |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 26053, MsgBus: 18675 2025-10-24T16:27:40.190984Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825803551819203:2146];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:40.191008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002dd6/r3tmp/tmpJ6Ruth/pdisk_1.dat 2025-10-24T16:27:40.221984Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:40.222024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:40.222208Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825803551819094:2081] 1761323260190203 != 1761323260190206 TServer::EnableGrpc on GrpcPort 26053, node 1 2025-10-24T16:27:40.235666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:40.235678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:40.235680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:40.235732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18675 TClient is connected to server localhost:18675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:40.299726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:40.299750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:40.300532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:40.300856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-10-24T16:27:40.313401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.331602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.350109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.360722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:40.511988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:40.537229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825803551820746:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.537249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.537312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825803551820756:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.537322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.589725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.598103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.607045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.614142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.621178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.635487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.649677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.664537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.680886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825803551821618:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.680917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825803551821623:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.680920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.680993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825803551821626:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.681011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.681828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:40.690599Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825803551821625:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:27:40.766587Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825803551821679:3555] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 23300, MsgBus: 15113 2025-10-24T16:27:40.307008Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825801094004211:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:40.307023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002dd3/r3tmp/tmpU21XD8/pdisk_1.dat 2025-10-24T16:27:40.339083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:40.349174Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825801094004182:2081] 1761323260306715 != 1761323260306718 2025-10-24T16:27:40.351804Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23300, node 1 2025-10-24T16:27:40.357109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:40.357124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:40.357127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:40.357180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15113 TClient is connected to server localhost:15113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:40.401762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:40.412874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.413796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:40.413821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:40.414670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:40.430977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.450454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.461244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.624255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825801094005807:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.624272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.624328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825801094005817:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.624334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.635397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:40.678520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.686113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.698993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.716280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.726489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.740949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.755104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.769304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:40.785666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825801094006702:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.785696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825801094006707:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.785703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.785752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825801094006710:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.785766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:40.786526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:40.795881Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825801094006709:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:27:40.846565Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825801094006763:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 28433, MsgBus: 14170 2025-10-24T16:27:40.761590Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825800124188877:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:40.761725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002dbc/r3tmp/tmplSnpgs/pdisk_1.dat 2025-10-24T16:27:40.804738Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:40.805321Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:40.805560Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825800124188773:2081] 1761323260760826 != 1761323260760829 TServer::EnableGrpc on GrpcPort 28433, node 1 2025-10-24T16:27:40.814137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:40.814158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:40.814160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:40.814210Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14170 TClient is connected to server localhost:14170 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:40.868270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:40.868302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:40.869411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:40.871112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:40.879001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.897343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.915944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:40.928033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:41.091663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825804419157693:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.091709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.091789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825804419157703:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.091804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.099802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:41.142134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.150071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.160192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.167205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.173856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.181704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.195673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.210079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:41.226032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825804419158586:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.226058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.226056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825804419158591:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.226105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825804419158593:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.226114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:41.226832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:41.229609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825804419158594:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:27:41.289482Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825804419158647:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:41.510011Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323261507, txId: 281474976715673] shutting down >> StatisticsSaveLoad::Delete >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest |60.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> KqpSystemView::Sessions+EnableRealSystemViewPaths |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |60.6%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> DataShardTxOrder::DelayData >> DataShardTxOrder::RandomPoints_DelayData >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> DataShardOutOfOrder::TestOutOfOrderLockLost >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] >> DataShardTxOrder::ZigZag ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 22942, MsgBus: 6971 2025-10-24T16:27:42.589130Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825808521868558:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:42.589225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002db3/r3tmp/tmpJT9cSX/pdisk_1.dat 2025-10-24T16:27:42.625773Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:42.628388Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.628592Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7564825808521868529:2081] 1761323262588674 != 1761323262588677 TServer::EnableGrpc on GrpcPort 22942, node 1 2025-10-24T16:27:42.632973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:42.632981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:42.632982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:42.633011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6971 TClient is connected to server localhost:6971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:42.675632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:42.688679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:42.695555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:42.695576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:42.696662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:42.752575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:42.768936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:42.778132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:42.866370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:42.882587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825808521870183:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.882614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.882698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825808521870193:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.882710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.923212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.930883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.937494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.944980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.952169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.966421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.980818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:42.994945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:43.010942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825812816838351:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:43.010967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:43.010993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825812816838356:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:43.010998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825812816838358:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:43.011004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:43.011580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:43.014227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825812816838360:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:27:43.097349Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825812816838412:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:43.267348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) >> TSchemeShardSysViewTestReboots::CreateSysView [GOOD] |60.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 4025, MsgBus: 65194 2025-10-24T16:27:42.614609Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825809894971265:2070];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:42.614624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002db8/r3tmp/tmpZn822a/pdisk_1.dat 2025-10-24T16:27:42.649411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:42.649462Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.649699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825809894971235:2081] 1761323262614335 != 1761323262614338 TServer::EnableGrpc on GrpcPort 4025, node 1 2025-10-24T16:27:42.666455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:42.666469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:42.666471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:42.666512Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65194 TClient is connected to server localhost:65194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1761323262699 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:42.721192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:42.721215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:42.722226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:42.722733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:42.730379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:27:42.901042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 1 2025-10-24T16:27:42.976551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825809894972404:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.976553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825809894972395:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.976567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.976615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825809894972410:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.976627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:42.977211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:42.978709Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825809894972409:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:27:43.055534Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825814189939758:2571] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ydb-cpp-sdk/dev 2025-10-24T16:27:43.482752Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323263481, txId: 281474976715673] shutting down >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview_reboots/unittest >> TSchemeShardSysViewTestReboots::CreateSysView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:27:35.165305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:27:35.165336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.165342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:27:35.165351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:27:35.165359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:27:35.165363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:27:35.165373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:27:35.165394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:27:35.165540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:27:35.165617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:27:35.191043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:27:35.191110Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:35.191223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:27:35.196491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:27:35.196629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:27:35.196691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:27:35.202203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:27:35.202374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:27:35.202563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:35.202653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:27:35.203573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:35.203635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:27:35.203946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:35.203958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:35.203983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:27:35.203992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:27:35.203998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:27:35.204024Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:27:35.245702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-10-24T16:27:35.254893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
057594046316545 2025-10-24T16:27:43.731906Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2025-10-24T16:27:43.731928Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000039 2025-10-24T16:27:43.731979Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:27:43.731991Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 128849021032 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:27:43.731998Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 1002:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000039 2025-10-24T16:27:43.732016Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1002:0 128 -> 240 2025-10-24T16:27:43.732035Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-10-24T16:27:43.732042Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 FAKE_COORDINATOR: Erasing txId 1002 2025-10-24T16:27:43.732453Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:27:43.732459Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:27:43.732518Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-10-24T16:27:43.732532Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:27:43.732536Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [30:214:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 2 2025-10-24T16:27:43.732539Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [30:214:2214], at schemeshard: 72057594046678944, txId: 1002, path id: 39 2025-10-24T16:27:43.732547Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-10-24T16:27:43.732551Z node 30 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-10-24T16:27:43.732561Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-10-24T16:27:43.732564Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:27:43.732567Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1002:0 progress is 1/1 2025-10-24T16:27:43.732569Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:27:43.732572Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: false 2025-10-24T16:27:43.732576Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-10-24T16:27:43.732578Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1002:0 2025-10-24T16:27:43.732581Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1002:0 2025-10-24T16:27:43.732589Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-10-24T16:27:43.732594Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1002, publications: 2, subscribers: 0 2025-10-24T16:27:43.732597Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2025-10-24T16:27:43.732599Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-10-24T16:27:43.732705Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:27:43.732712Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:27:43.732726Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:27:43.732732Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-10-24T16:27:43.732736Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-10-24T16:27:43.732823Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 
2025-10-24T16:27:43.732830Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:27:43.732833Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:27:43.732836Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-10-24T16:27:43.732839Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-10-24T16:27:43.732845Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-10-24T16:27:43.733508Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-10-24T16:27:43.733549Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-10-24T16:27:43.733585Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-10-24T16:27:43.733590Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-10-24T16:27:43.733649Z node 30 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:27:43.733663Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-10-24T16:27:43.733667Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [30:738:2727] TestWaitNotification: OK eventTxId 1002 2025-10-24T16:27:43.733722Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:27:43.733746Z node 30 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 31us result status StatusSuccess 2025-10-24T16:27:43.733803Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardOutOfOrder::TestSnapshotReadPriority >> StatisticsSaveLoad::Delete [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2025-10-24T16:27:42.343810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:42.364238Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:42.364312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:42.364338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00223b/r3tmp/tmpxKPqXy/pdisk_1.dat 2025-10-24T16:27:42.431406Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.463468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:42.463500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:42.496147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7946, node 1 2025-10-24T16:27:42.579192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:42.579216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:42.579221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:42.579385Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:42.582615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:42.607424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23745 2025-10-24T16:27:42.935671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:43.675023Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.676838Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:43.677442Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:43.683043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.683064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.705592Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:43.706042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.784078Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.784109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.795511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.857839Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:43.879043Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879208Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879346Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879425Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879435Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879451Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879483Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879499Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.879514Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.938166Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:43.942973Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:43.942993Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:43.949417Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:43.949543Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:43.949558Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:43.949562Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:43.949566Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:43.949571Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:43.949575Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:43.949580Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:43.949837Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:43.950184Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:43.950427Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:43.952533Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:43.952547Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:43.952556Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:43.957124Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:43.957721Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:43.958420Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:43.958436Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1873:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:43.960039Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1896:2613] 2025-10-24T16:27:43.960091Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1896:2613], schemeshard id = 72075186224037897 2025-10-24T16:27:44.020192Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:44.041698Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:44.176781Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:44.178208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:44.179099Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:44.179116Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:44.251600Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:44.417020Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:44.935115Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:44.935705Z node 1 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:44.935806Z node 1 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [1:2236:3063], ActorId: [1:2237:3064], Starting query actor #1 [1:2238:3065] 2025-10-24T16:27:44.935815Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [1:2237:3064], ActorId: [1:2238:3065], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:44.937770Z node 1 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [1:2237:3064], ActorId: [1:2238:3065], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZDRkNjMxNzctMTczMjQwOGEtNTU1OWVlNTctYjQyZjU2ZDA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:44.968910Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2258:3079]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:44.968960Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:44.968970Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2260:3081] 2025-10-24T16:27:44.968979Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2260:3081] 2025-10-24T16:27:44.969098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2261:2785] 2025-10-24T16:27:44.969144Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2260:3081], server id = [2:2261:2785], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:44.969188Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:2261:2785], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:44.969197Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:44.969244Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:44.969251Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2258:3079], StatRequests.size() = 1 
2025-10-24T16:27:44.981427Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:44.996298Z node 1 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [1:2237:3064], ActorId: [1:2238:3065], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZDRkNjMxNzctMTczMjQwOGEtNTU1OWVlNTctYjQyZjU2ZDA=, TxId: 2025-10-24T16:27:44.996321Z node 1 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [1:2237:3064], ActorId: [1:2238:3065], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZDRkNjMxNzctMTczMjQwOGEtNTU1OWVlNTctYjQyZjU2ZDA=, TxId: 2025-10-24T16:27:44.996383Z node 1 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [1:2236:3063], ActorId: [1:2237:3064], Got response [1:2238:3065] SUCCESS 2025-10-24T16:27:44.996558Z node 1 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [1:2278:3086], ActorId: [1:2279:3087], Starting query actor #1 [1:2280:3088] 2025-10-24T16:27:44.996565Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [1:2279:3087], ActorId: [1:2280:3088], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:44.996940Z node 1 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [1:2279:3087], ActorId: [1:2280:3088], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MzdiN2E2ZjItNzhkYWVhOTMtNzgxMjQwOWItMjgxNjA4OWI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:45.000771Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2289:3097]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:45.000818Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:45.000823Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:2289:3097], StatRequests.size() = 1 2025-10-24T16:27:45.026937Z node 1 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [1:2279:3087], ActorId: [1:2280:3088], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MzdiN2E2ZjItNzhkYWVhOTMtNzgxMjQwOWItMjgxNjA4OWI=, TxId: 2025-10-24T16:27:45.026960Z node 1 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [1:2279:3087], ActorId: [1:2280:3088], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MzdiN2E2ZjItNzhkYWVhOTMtNzgxMjQwOWItMjgxNjA4OWI=, TxId: 2025-10-24T16:27:45.027007Z node 1 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [1:2278:3086], ActorId: [1:2279:3087], Got response [1:2280:3088] SUCCESS 2025-10-24T16:27:45.027211Z node 1 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [1:2311:3101], ActorId: [1:2312:3102], Starting query actor #1 [1:2313:3103] 2025-10-24T16:27:45.027218Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [1:2312:3102], ActorId: [1:2313:3103], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:45.027587Z node 1 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [1:2312:3102], ActorId: [1:2313:3103], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=Y2M0MGI0M2ItZjQ5ZDIzZTQtODY5ODA1MjMtZGVmZWJjYjI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-10-24T16:27:45.035125Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2322:3112]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:45.035177Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:45.035182Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 3, ReplyToActorId = [1:2322:3112], StatRequests.size() = 1 2025-10-24T16:27:45.058227Z node 1 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [1:2312:3102], ActorId: [1:2313:3103], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=Y2M0MGI0M2ItZjQ5ZDIzZTQtODY5ODA1MjMtZGVmZWJjYjI=, TxId: 01k8bgrb0pb2bym20th4atgbqe 2025-10-24T16:27:45.058284Z node 1 :STATISTICS WARN: query_actor.cpp:375: [TQueryBase] OwnerId: [1:2312:3102], ActorId: [1:2313:3103], Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=Y2M0MGI0M2ItZjQ5ZDIzZTQtODY5ODA1MjMtZGVmZWJjYjI=, TxId: 01k8bgrb0pb2bym20th4atgbqe 2025-10-24T16:27:45.058346Z node 1 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [1:2311:3101], ActorId: [1:2312:3102], Got response [1:2313:3103] BAD_REQUEST >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> StatisticsSaveLoad::Simple [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2025-10-24T16:27:42.633144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:42.657969Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:42.658041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:42.658067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00222d/r3tmp/tmpHz2GiX/pdisk_1.dat 2025-10-24T16:27:42.735155Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.767638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:42.767673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:42.800513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17835, node 1 2025-10-24T16:27:42.883867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:42.883892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:42.883897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:42.884058Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:42.887352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:42.912040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61517 2025-10-24T16:27:43.239176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:43.932750Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.934437Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:43.935025Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:43.941201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.941226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.963824Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:43.964325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:44.044459Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:44.044487Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:44.056020Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:44.118760Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:44.140269Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140603Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140694Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140706Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140721Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140755Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140772Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.140787Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:44.210011Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:44.216049Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:44.216082Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:44.222320Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:44.222654Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:44.222680Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:44.222687Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:44.222693Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:44.222700Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:44.222707Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:44.222715Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:44.222973Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:44.223178Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:44.223484Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:44.225688Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:44.225702Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:44.225713Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:44.230396Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:44.231298Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:44.232740Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:44.232775Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:44.234484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:44.234544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:44.295005Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:44.316582Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:44.442136Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:44.444758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:44.446504Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:44.446546Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:44.534371Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:44.669836Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:45.386991Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:45.387509Z node 1 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:45.387587Z node 1 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [1:2236:3064], ActorId: [1:2237:3065], Starting query actor #1 [1:2238:3066] 2025-10-24T16:27:45.387596Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [1:2237:3065], ActorId: [1:2238:3066], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:45.389489Z node 1 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [1:2237:3065], ActorId: [1:2238:3066], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZmVkN2FkNGUtOTNlZmQ0NTAtZDA1MDJlYjYtMzk5NDgwMA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:45.427055Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2258:3080]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:45.427107Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:45.427121Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2260:3082] 2025-10-24T16:27:45.427132Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2260:3082] 2025-10-24T16:27:45.427282Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2261:2784] 2025-10-24T16:27:45.427332Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2260:3082], server id = [2:2261:2784], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:45.427385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:2261:2784], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:45.427392Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:45.427434Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:45.427440Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2258:3080], StatRequests.size() = 1 
2025-10-24T16:27:45.440264Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:45.455573Z node 1 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [1:2237:3065], ActorId: [1:2238:3066], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZmVkN2FkNGUtOTNlZmQ0NTAtZDA1MDJlYjYtMzk5NDgwMA==, TxId: 2025-10-24T16:27:45.455599Z node 1 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [1:2237:3065], ActorId: [1:2238:3066], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZmVkN2FkNGUtOTNlZmQ0NTAtZDA1MDJlYjYtMzk5NDgwMA==, TxId: 2025-10-24T16:27:45.455658Z node 1 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [1:2236:3064], ActorId: [1:2237:3065], Got response [1:2238:3066] SUCCESS 2025-10-24T16:27:45.455876Z node 1 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [1:2278:3087], ActorId: [1:2279:3088], Starting query actor #1 [1:2280:3089] 2025-10-24T16:27:45.455884Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [1:2279:3088], ActorId: [1:2280:3089], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:45.456299Z node 1 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [1:2279:3088], ActorId: [1:2280:3089], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=Y2U3MGI1YWMtZDQ2YmU5MzMtZDQ2NTFhOS05MjZiNGZhYw==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-10-24T16:27:45.463813Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2289:3098]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:45.463879Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:45.463887Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:2289:3098], StatRequests.size() = 1 2025-10-24T16:27:45.488627Z node 1 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [1:2279:3088], ActorId: [1:2280:3089], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=Y2U3MGI1YWMtZDQ2YmU5MzMtZDQ2NTFhOS05MjZiNGZhYw==, TxId: 01k8bgrbe4c3f37nps9f93jv1r 2025-10-24T16:27:45.488659Z node 1 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [1:2279:3088], ActorId: [1:2280:3089], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=Y2U3MGI1YWMtZDQ2YmU5MzMtZDQ2NTFhOS05MjZiNGZhYw==, TxId: 01k8bgrbe4c3f37nps9f93jv1r 2025-10-24T16:27:45.488718Z node 1 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [1:2278:3087], ActorId: [1:2279:3088], Got response [1:2280:3089] SUCCESS 2025-10-24T16:27:45.489046Z node 1 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [1:2311:3108], ActorId: [1:2312:3109], Starting query actor #1 [1:2313:3110] 2025-10-24T16:27:45.489055Z node 1 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [1:2312:3109], ActorId: [1:2313:3110], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:45.489423Z node 1 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [1:2312:3109], ActorId: [1:2313:3110], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NDQyZDZiMS1mZmY1ZDNkOS1jODY0ZTI0Ny0yNTg2ODExZA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-10-24T16:27:45.501347Z node 1 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [1:2312:3109], ActorId: [1:2313:3110], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NDQyZDZiMS1mZmY1ZDNkOS1jODY0ZTI0Ny0yNTg2ODExZA==, TxId: 01k8bgrbeh7qrp5pz0rf44vqyw 2025-10-24T16:27:45.501379Z node 1 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [1:2312:3109], ActorId: [1:2313:3110], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NDQyZDZiMS1mZmY1ZDNkOS1jODY0ZTI0Ny0yNTg2ODExZA==, TxId: 01k8bgrbeh7qrp5pz0rf44vqyw 2025-10-24T16:27:45.501434Z node 1 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [1:2311:3108], ActorId: [1:2312:3109], Got response [1:2313:3110] SUCCESS >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> StatisticsSaveLoad::ForbidAccess [GOOD] >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] Test command err: 2025-10-24T16:27:44.959948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:44.985372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:44.985439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:44.985446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018a0/r3tmp/tmpSSsgGC/pdisk_1.dat 2025-10-24T16:27:45.046538Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:45.047144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:45.047171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:45.047254Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323264642409 != 1761323264642412 2025-10-24T16:27:45.078040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:45.124285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:45.167710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:45.241184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:45.255866Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:45.256084Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:45.256142Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:45.256202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:45.262904Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:45.263056Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:45.263076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:45.263200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:45.263206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:45.263213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:45.263274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:45.263291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:45.263300Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:45.273552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:45.277056Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:45.277128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:45.277157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:45.277164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:45.277170Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:45.277176Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:45.277257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.277265Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.277377Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:45.277401Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:45.277414Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:45.277422Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:45.277429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:45.277433Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:45.277437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:45.277441Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:45.277445Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:45.277454Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.277458Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.277464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:45.277562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:45.277567Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:45.277583Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:45.277635Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:45.277645Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:45.277660Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:45.277666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:45.277669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:45.277673Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:45.277676Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:45.277771Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:45.277775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:45.277778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:45.277781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:45.277789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:45.277792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:45.277796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:45.277798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:45.277803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:45.278027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:27:45.278034Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:27:45.288299Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:45.288325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:45.288332Z n ... : 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:46.991441Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1142:2706] TxId: 281474976710676. Ctx: { TraceId: 01k8bgrcr64tkg6zwwpt8gask6, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU4NzM3YzktOWVmNGFmNWItY2M2YzJlNjAtMjAxMzViODE=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:46.991449Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1142:2706] TxId: 281474976710676. Ctx: { TraceId: 01k8bgrcr64tkg6zwwpt8gask6, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU4NzM3YzktOWVmNGFmNWItY2M2YzJlNjAtMjAxMzViODE=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:46.991455Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1142:2706] TxId: 281474976710676. Ctx: { TraceId: 01k8bgrcr64tkg6zwwpt8gask6, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU4NzM3YzktOWVmNGFmNWItY2M2YzJlNjAtMjAxMzViODE=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:46.991494Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=2&id=M2E5ZmE4MjUtM2Y1NTZjNjktODE1MmJiNDMtYmZjMDM0ODE=, ActorId: [2:898:2721], ActorState: ExecuteState, TraceId: 01k8bgrcr75vjb0xabarrksfjs, Create QueryResponse for error on request, msg: 2025-10-24T16:27:46.991891Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710677. Resolved key sets: 0 2025-10-24T16:27:46.991949Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710677. Ctx: { TraceId: 01k8bgrcr72htbfgd0jyhctyxt, Database: , SessionId: ydb://session/3?node_id=2&id=OTBlYTM4ZjEtMzA3NTk2MjgtZjYwOTY3M2EtNzEyM2JlMGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:46.991956Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710677. Ctx: { TraceId: 01k8bgrcr72htbfgd0jyhctyxt, Database: , SessionId: ydb://session/3?node_id=2&id=OTBlYTM4ZjEtMzA3NTk2MjgtZjYwOTY3M2EtNzEyM2JlMGQ=, PoolId: default, DatabaseId: /Root}. 
Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:46.991962Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1143:2713] TxId: 281474976710677. Ctx: { TraceId: 01k8bgrcr72htbfgd0jyhctyxt, Database: , SessionId: ydb://session/3?node_id=2&id=OTBlYTM4ZjEtMzA3NTk2MjgtZjYwOTY3M2EtNzEyM2JlMGQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:46.991974Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1143:2713] TxId: 281474976710677. Ctx: { TraceId: 01k8bgrcr72htbfgd0jyhctyxt, Database: , SessionId: ydb://session/3?node_id=2&id=OTBlYTM4ZjEtMzA3NTk2MjgtZjYwOTY3M2EtNzEyM2JlMGQ=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:46.991982Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1143:2713] TxId: 281474976710677. Ctx: { TraceId: 01k8bgrcr72htbfgd0jyhctyxt, Database: , SessionId: ydb://session/3?node_id=2&id=OTBlYTM4ZjEtMzA3NTk2MjgtZjYwOTY3M2EtNzEyM2JlMGQ=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:46.992196Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710678. Resolved key sets: 0 2025-10-24T16:27:46.992308Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710678. Ctx: { TraceId: 01k8bgrcr7bvvd8h5z9mn15q21, Database: , SessionId: ydb://session/3?node_id=2&id=ODM1N2ExZDctOTAxNjYzMTItOTVlYTIyOTQtNzdlN2JhYzQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:46.992315Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710678. Ctx: { TraceId: 01k8bgrcr7bvvd8h5z9mn15q21, Database: , SessionId: ydb://session/3?node_id=2&id=ODM1N2ExZDctOTAxNjYzMTItOTVlYTIyOTQtNzdlN2JhYzQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:46.992321Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1152:2715] TxId: 281474976710678. Ctx: { TraceId: 01k8bgrcr7bvvd8h5z9mn15q21, Database: , SessionId: ydb://session/3?node_id=2&id=ODM1N2ExZDctOTAxNjYzMTItOTVlYTIyOTQtNzdlN2JhYzQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:46.992332Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1152:2715] TxId: 281474976710678. Ctx: { TraceId: 01k8bgrcr7bvvd8h5z9mn15q21, Database: , SessionId: ydb://session/3?node_id=2&id=ODM1N2ExZDctOTAxNjYzMTItOTVlYTIyOTQtNzdlN2JhYzQ=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:46.992337Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1152:2715] TxId: 281474976710678. Ctx: { TraceId: 01k8bgrcr7bvvd8h5z9mn15q21, Database: , SessionId: ydb://session/3?node_id=2&id=ODM1N2ExZDctOTAxNjYzMTItOTVlYTIyOTQtNzdlN2JhYzQ=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:46.992341Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710679. 
Resolved key sets: 0 2025-10-24T16:27:46.992362Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710679. Ctx: { TraceId: 01k8bgrcr7f7th69hj5g1mv0eq, Database: , SessionId: ydb://session/3?node_id=2&id=YmY0MDNiY2MtZjQwY2JmZTktOWMxMzI1NDgtMmQ0ZTQxZmM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:46.992365Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710679. Ctx: { TraceId: 01k8bgrcr7f7th69hj5g1mv0eq, Database: , SessionId: ydb://session/3?node_id=2&id=YmY0MDNiY2MtZjQwY2JmZTktOWMxMzI1NDgtMmQ0ZTQxZmM=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:46.992368Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1157:2717] TxId: 281474976710679. Ctx: { TraceId: 01k8bgrcr7f7th69hj5g1mv0eq, Database: , SessionId: ydb://session/3?node_id=2&id=YmY0MDNiY2MtZjQwY2JmZTktOWMxMzI1NDgtMmQ0ZTQxZmM=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:46.992373Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1157:2717] TxId: 281474976710679. Ctx: { TraceId: 01k8bgrcr7f7th69hj5g1mv0eq, Database: , SessionId: ydb://session/3?node_id=2&id=YmY0MDNiY2MtZjQwY2JmZTktOWMxMzI1NDgtMmQ0ZTQxZmM=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:46.992376Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1157:2717] TxId: 281474976710679. Ctx: { TraceId: 01k8bgrcr7f7th69hj5g1mv0eq, Database: , SessionId: ydb://session/3?node_id=2&id=YmY0MDNiY2MtZjQwY2JmZTktOWMxMzI1NDgtMmQ0ZTQxZmM=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:46.992461Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710680. Resolved key sets: 0 2025-10-24T16:27:46.992465Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710681. Resolved key sets: 0 2025-10-24T16:27:46.992476Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710680. Ctx: { TraceId: 01k8bgrcr75s0q28k1794zkry5, Database: , SessionId: ydb://session/3?node_id=2&id=YmViOWIzNmEtZWM4MTFiYmEtZWNiYTI4NmItZDQ3NzU0NTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:46.992479Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710680. Ctx: { TraceId: 01k8bgrcr75s0q28k1794zkry5, Database: , SessionId: ydb://session/3?node_id=2&id=YmViOWIzNmEtZWM4MTFiYmEtZWNiYTI4NmItZDQ3NzU0NTk=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:46.992483Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1158:2719] TxId: 281474976710680. Ctx: { TraceId: 01k8bgrcr75s0q28k1794zkry5, Database: , SessionId: ydb://session/3?node_id=2&id=YmViOWIzNmEtZWM4MTFiYmEtZWNiYTI4NmItZDQ3NzU0NTk=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:46.992488Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1158:2719] TxId: 281474976710680. 
Ctx: { TraceId: 01k8bgrcr75s0q28k1794zkry5, Database: , SessionId: ydb://session/3?node_id=2&id=YmViOWIzNmEtZWM4MTFiYmEtZWNiYTI4NmItZDQ3NzU0NTk=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:46.992493Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1158:2719] TxId: 281474976710680. Ctx: { TraceId: 01k8bgrcr75s0q28k1794zkry5, Database: , SessionId: ydb://session/3?node_id=2&id=YmViOWIzNmEtZWM4MTFiYmEtZWNiYTI4NmItZDQ3NzU0NTk=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:46.992500Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710681. Ctx: { TraceId: 01k8bgrcr75vjb0xabarrksfjs, Database: , SessionId: ydb://session/3?node_id=2&id=M2E5ZmE4MjUtM2Y1NTZjNjktODE1MmJiNDMtYmZjMDM0ODE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:46.992504Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710681. Ctx: { TraceId: 01k8bgrcr75vjb0xabarrksfjs, Database: , SessionId: ydb://session/3?node_id=2&id=M2E5ZmE4MjUtM2Y1NTZjNjktODE1MmJiNDMtYmZjMDM0ODE=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:46.992509Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1162:2721] TxId: 281474976710681. Ctx: { TraceId: 01k8bgrcr75vjb0xabarrksfjs, Database: , SessionId: ydb://session/3?node_id=2&id=M2E5ZmE4MjUtM2Y1NTZjNjktODE1MmJiNDMtYmZjMDM0ODE=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:46.992515Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1162:2721] TxId: 281474976710681. Ctx: { TraceId: 01k8bgrcr75vjb0xabarrksfjs, Database: , SessionId: ydb://session/3?node_id=2&id=M2E5ZmE4MjUtM2Y1NTZjNjktODE1MmJiNDMtYmZjMDM0ODE=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:46.992520Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1162:2721] TxId: 281474976710681. Ctx: { TraceId: 01k8bgrcr75vjb0xabarrksfjs, Database: , SessionId: ydb://session/3?node_id=2&id=M2E5ZmE4MjUtM2Y1NTZjNjktODE1MmJiNDMtYmZjMDM0ODE=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2025-10-24T16:27:41.824967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:41.851630Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:41.851737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:41.851766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00229c/r3tmp/tmptgDmMI/pdisk_1.dat 2025-10-24T16:27:41.934922Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:41.967570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:41.967616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:42.000971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32538, node 1 2025-10-24T16:27:42.089542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:42.089573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:42.089579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:42.089787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:42.097786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:42.125297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18381 2025-10-24T16:27:42.454126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:43.276806Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.278696Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:43.279344Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:43.285456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.285481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.308227Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:43.308697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.387039Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.387072Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.398577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.461626Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:43.483038Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483238Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483422Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483508Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483536Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483571Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483589Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.483605Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:43.541542Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:43.549500Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:43.549525Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:43.555579Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:43.555788Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:43.555810Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:43.555815Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:43.555819Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:43.555824Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:43.555828Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:43.555833Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:43.556190Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:43.556701Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1636:2456] 2025-10-24T16:27:43.557387Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:43.557952Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:43.557960Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:43.557969Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:43.561034Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:43.561052Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1881:2603], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:43.562338Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1895:2612] 2025-10-24T16:27:43.562397Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1895:2612], schemeshard id = 72075186224037897 2025-10-24T16:27:43.566192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:43.567702Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:43.567729Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:43.626733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:43.702299Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:43.790286Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:43.883923Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:44.734958Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:44.765642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2223:3058], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.765681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.765769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.765777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.768922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:44.977648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2521:3108], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.977691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.977838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2525:3111], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.977844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.978146Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2528:3114]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:44.978179Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:44.978189Z node 1 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [1:2530:3116] 2025-10-24T16:27:44.978198Z node 1 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [1:2530:3116] 2025-10-24T16:27:44.978320Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:2531:2975] 2025-10-24T16:27:44.978378Z node 1 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 1, client id = [1:2530:3116], server id = [2:2531:2975], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:44.978411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:2531:2975], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:44.978418Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-10-24T16:27:44.978468Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-10-24T16:27:44.978475Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [1:2528:3114], StatRequests.size() = 1 2025-10-24T16:27:44.980760Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:44.980828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2535:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.980841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.980910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2539:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.980915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.980943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:44.982040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:45.089990Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:45.090024Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:45.182868Z node 1 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [1:2530:3116], schemeshard count = 1 2025-10-24T16:27:45.581119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2544:3129], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:27:45.735680Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:2648:3197] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:45.738072Z node 1 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2671:3213]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:45.738112Z node 1 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:45.738117Z node 1 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [1:2671:3213], StatRequests.size() = 1 2025-10-24T16:27:45.746289Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgrayc7d6svwjfrc3vvgqg, Database: , SessionId: ydb://session/3?node_id=1&id=MzViZjQ3ZDAtOTkzY2Y4YWYtNTI3MDFjNS01Y2VkZWExMw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:45.782986Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [1:2750:3243], for# user@builtin, access# DescribeSchema 2025-10-24T16:27:45.783015Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [1:2750:3243], for# user@builtin, access# DescribeSchema 2025-10-24T16:27:45.785319Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:2740:3239], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:27:45.786003Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=1&id=YzA2MThlZWEtZjhlNzYwMGEtZmYzMWViMWQtNmIzOTMxMzI=, ActorId: [1:2731:3231], ActorState: ExecuteState, TraceId: 01k8bgrbqg26fwm12ss0eae94r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: 2025-10-24T16:27:45.142793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:45.166973Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:45.167034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:45.167041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00189d/r3tmp/tmpsQ9MeW/pdisk_1.dat 2025-10-24T16:27:45.236019Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:45.237114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:45.237151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:45.237271Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323264857478 != 1761323264857481 2025-10-24T16:27:45.268390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:45.315996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:45.317123Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-10-24T16:27:45.317138Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:398:2397] Proxy marker# C1 2025-10-24T16:27:45.348285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:45.430076Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-10-24T16:27:45.430112Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-10-24T16:27:45.430178Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-10-24T16:27:45.430298Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-10-24T16:27:45.430309Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:398:2397] Proxy 2025-10-24T16:27:45.430931Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-10-24T16:27:45.430955Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 
[2:6] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-10-24T16:27:45.430961Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:6] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-10-24T16:27:45.430966Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:6] persistent tx 1 acknowledged 2025-10-24T16:27:45.432112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:45.446088Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:665:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:45.446243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:665:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:45.446279Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-10-24T16:27:45.446321Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:45.447016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:665:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:45.452652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:45.452682Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:45.452795Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:45.452802Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:45.452806Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:45.452852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:45.452867Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:45.452877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:689:2566] in generation 1 2025-10-24T16:27:45.452939Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:45.455820Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:45.455883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:45.455900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:691:2576] 2025-10-24T16:27:45.455905Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:45.455908Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: 
at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:45.455912Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:45.455951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.455956Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.456024Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:45.456039Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:45.456051Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:45.456055Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:45.456062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:45.456066Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:45.456069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:45.456072Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:45.456076Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:27:45.456083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:676:2567], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.456087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.456091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2563], serverId# [1:676:2567], sessionId# [0:0:0] 2025-10-24T16:27:45.456153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:676:2567] 2025-10-24T16:27:45.456157Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:45.456171Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:45.456212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:45.456222Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:45.456233Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:45.456241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is 
ExecutedNoMoreRestarts 2025-10-24T16:27:45.456244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:45.456248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:45.456251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:45.456314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:45.456317Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:45.456320Z node 1 :TX_DA ... 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-10-24T16:27:47.136844Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:47.136858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:47.136873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:47.136876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:47.136879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:47.136882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:47.136890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2025-10-24T16:27:47.136896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:47.136898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:47.136901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:27:47.136904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:47.136917Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715682 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-10-24T16:27:47.136962Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 281474976715682, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:27:47.136967Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: 
PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-10-24T16:27:47.136972Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[1:1427:3067], 0} after executionsCount# 1 2025-10-24T16:27:47.136978Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[1:1427:3067], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:47.136991Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[1:1427:3067], 0} finished in read 2025-10-24T16:27:47.136999Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:47.137003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:47.137006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:47.137009Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:47.137015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:47.137017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:47.137019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037888 has finished 2025-10-24T16:27:47.137023Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:47.137034Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:27:47.137238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [1:1427:3067], Recipient [1:1368:3032]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:47.137245Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-10-24T16:27:47.162088Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-10-24T16:27:47.162118Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:293: Coordinator# 72057594046316545 scheduling step 4500 in 0.499900s at 4.450000s 2025-10-24T16:27:47.162402Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715685. Ctx: { TraceId: 01k8bgrd220qjcmbkq98pg7e3c, Database: , SessionId: ydb://session/3?node_id=1&id=NzEzNDJjY2MtZGZmNjBlNmQtYTE4NTgyMWMtYzZmYmYzZDI=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:27:47.162792Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [1:1451:3084], Recipient [1:1368:3032]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-10-24T16:27:47.162842Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:47.162856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:47.162870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:47.162874Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:47.162877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:47.162880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:47.162891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-10-24T16:27:47.162896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:47.162898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:47.162901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:27:47.162904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:47.162918Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715685 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-10-24T16:27:47.162969Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 281474976715685, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:27:47.162973Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-10-24T16:27:47.162978Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[1:1451:3084], 0} after executionsCount# 1 2025-10-24T16:27:47.162984Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[1:1451:3084], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:47.162996Z 
node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[1:1451:3084], 0} finished in read 2025-10-24T16:27:47.163006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:47.163008Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:47.163011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:47.163013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:47.163019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:47.163021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:47.163024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:3] at 72075186224037888 has finished 2025-10-24T16:27:47.163027Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:47.163039Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:27:47.163067Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [1:69:2116], Recipient [1:1368:3032]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715685 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-10-24T16:27:47.163169Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [1:1451:3084], Recipient [1:1368:3032]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:47.163176Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } |60.6%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-10-24T16:27:19.699377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.740754Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.740850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.740887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002af0/r3tmp/tmpGVG2z1/pdisk_1.dat 2025-10-24T16:27:19.843744Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:19.876796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:19.876842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:19.910242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15705, node 1 2025-10-24T16:27:19.999028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:19.999052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:19.999057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:19.999254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.002975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.050237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28594 2025-10-24T16:27:20.408243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.248243Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.249691Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.250702Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.258516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.258556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.281861Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.282630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.376402Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.376440Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.376823Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377025Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377139Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377309Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377389Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377407Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377440Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377472Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.377492Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.389448Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.444318Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.450620Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.450654Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.458258Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.458374Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.458399Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.458404Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.458409Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.458415Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.458420Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:21.458427Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.458836Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.461917Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.461939Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1827:2585], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.463491Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1850:2599] 2025-10-24T16:27:21.463540Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1850:2599], schemeshard id = 72075186224037897 2025-10-24T16:27:21.477132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1880:2607] 2025-10-24T16:27:21.477904Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.485294Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.485316Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.485331Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.487525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.489296Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.489329Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:21.545485Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.595134Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.653278Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.739008Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:21.874559Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:22.559963Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.619754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2223:3059], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.619788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.619994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.620010Z node 1 ... 21: ConnectToSA(), pipe client id = [2:4900:4465] 2025-10-24T16:27:45.709813Z node 2 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [2:4900:4465] 2025-10-24T16:27:45.709943Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4901:4466] 2025-10-24T16:27:45.709978Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4900:4465], server id = [2:4901:4466], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:45.709988Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:4901:4466], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:45.709994Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:45.710029Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:45.710042Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:4898:4463], StatRequests.size() = 1 2025-10-24T16:27:45.710050Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:45.727826Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4888:4453], ActorId: [2:4889:4454], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDU3Mzc3NjItMzI0MTIwMTQtZjEyMDNmODAtMTViMDIwZTE=, TxId: 2025-10-24T16:27:45.727867Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4888:4453], ActorId: [2:4889:4454], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDU3Mzc3NjItMzI0MTIwMTQtZjEyMDNmODAtMTViMDIwZTE=, TxId: 2025-10-24T16:27:45.728104Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4887:4452], ActorId: [2:4888:4453], Got response [2:4889:4454] SUCCESS 2025-10-24T16:27:45.728201Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:45.739815Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:45.739837Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:45.813197Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:45.813223Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:45.834043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:4900:4465], schemeshard count = 1 2025-10-24T16:27:47.116505Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:47.116531Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-10-24T16:27:47.116540Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:27:47.116545Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:47.117724Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:47.129685Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:47.129868Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:47.129895Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:47.130191Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:47.141601Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:47.141677Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:47.141936Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5010:4520], server id = [2:5014:4524], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:47.142052Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5010:4520], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:47.142107Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5011:4521], server id = [2:5015:4525], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:47.142114Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5011:4521], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:47.142135Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5012:4522], server id = [2:5016:4526], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:47.142141Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5012:4522], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:47.142327Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5013:4523], server id = [2:5017:4527], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:47.142336Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5013:4523], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:47.143362Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:47.143463Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5010:4520], server id = [2:5014:4524], tablet id = 72075186224037899 2025-10-24T16:27:47.143468Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:47.143563Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:47.143635Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5011:4521], server id = [2:5015:4525], tablet id = 72075186224037900 2025-10-24T16:27:47.143640Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:47.143985Z node 2 
:STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:47.144026Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:47.144032Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:47.144060Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:47.144086Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:47.144166Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5030:4536], ActorId: [2:5031:4537], Starting query actor #1 [2:5032:4538] 2025-10-24T16:27:47.144177Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5031:4537], ActorId: [2:5032:4538], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:47.144719Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5012:4522], server id = [2:5016:4526], tablet id = 72075186224037901 2025-10-24T16:27:47.144728Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:47.144833Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5013:4523], server id = [2:5017:4527], tablet id = 72075186224037902 2025-10-24T16:27:47.144837Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:47.144911Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5031:4537], ActorId: [2:5032:4538], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NmVhMDNhZGUtZmE2MGQ5OTktZTFhZTk4YTgtMjA5M2MxMjI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:47.151399Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5041:4547]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:47.151456Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:47.151463Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5041:4547], StatRequests.size() = 1 2025-10-24T16:27:47.199275Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5031:4537], ActorId: [2:5032:4538], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmVhMDNhZGUtZmE2MGQ5OTktZTFhZTk4YTgtMjA5M2MxMjI=, TxId: 2025-10-24T16:27:47.199301Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5031:4537], ActorId: [2:5032:4538], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmVhMDNhZGUtZmE2MGQ5OTktZTFhZTk4YTgtMjA5M2MxMjI=, TxId: 2025-10-24T16:27:47.199361Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5030:4536], ActorId: [2:5031:4537], Got response [2:5032:4538] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:47.199427Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5054:4553]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:47.199483Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:47.199579Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:47.199584Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:47.199629Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:47.199635Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:47.199641Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:47.200665Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> DataShardOutOfOrder::TestReadTableWriteConflict >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] Test command err: 2025-10-24T16:27:43.223259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.248390Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:43.248447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:43.248454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018a8/r3tmp/tmpkXW4Qp/pdisk_1.dat 2025-10-24T16:27:43.315452Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:43.316359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.316397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.316511Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323262928837 != 1761323262928840 2025-10-24T16:27:43.347270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.392693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:43.424871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:43.507549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:43.521071Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:43.521242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:43.521280Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:43.521320Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:43.522083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:43.528058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:43.528094Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:43.528252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:43.528261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:43.528269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:43.528317Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:43.528332Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:43.528341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:43.538581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.541821Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:43.541871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.541887Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:43.541893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:43.541897Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:43.541901Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:43.541949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.541954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.542020Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:43.542033Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:43.542046Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:43.542052Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:43.542056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:43.542059Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:43.542062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:43.542065Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:43.542069Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:43.542076Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.542079Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.542083Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:43.542146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:43.542150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.542165Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:43.542207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:43.542217Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:43.542230Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:43.542237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.542242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:43.542248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:43.542251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.542316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.542320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:43.542322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:43.542325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:43.542332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:43.542334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:43.551395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:43.551422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:43.551428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.551891Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:27:43.551906Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:27:43.562263Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:43.562292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.562299Z n ... default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 6 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1042:2827], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], CA [2:1041:2826], 2025-10-24T16:27:47.933621Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1041:2826], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 85 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 20 FinishTimeMs: 1761323267933 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 8 HostName: "ghrun-aakgzge3ui" NodeId: 2 StartTimeMs: 1761323267932 CreateTimeMs: 1761323267831 UpdateTimeMs: 1761323267933 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:47.933628Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1041:2826] 2025-10-24T16:27:47.933634Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1042:2827], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-10-24T16:27:47.933638Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1042:2827], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-10-24T16:27:47.933711Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. 
Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1042:2827], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 223 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 138 FinishTimeMs: 1761323267933 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 126 BuildCpuTimeUs: 12 HostName: "ghrun-aakgzge3ui" NodeId: 2 StartTimeMs: 1761323267932 CreateTimeMs: 1761323267831 UpdateTimeMs: 1761323267933 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:47.933717Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1042:2827] 2025-10-24T16:27:47.933721Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-10-24T16:27:47.933725Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-10-24T16:27:47.933760Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1043:2828], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 151 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 107 FinishTimeMs: 1761323267933 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 97 BuildCpuTimeUs: 10 HostName: "ghrun-aakgzge3ui" NodeId: 2 StartTimeMs: 1761323267932 CreateTimeMs: 1761323267831 UpdateTimeMs: 1761323267933 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:47.933765Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1043:2828] 2025-10-24T16:27:47.933769Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1046:2831], CA [2:1044:2829], 2025-10-24T16:27:47.933772Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. 
Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1046:2831], CA [2:1044:2829], 2025-10-24T16:27:48.033403Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1044:2829], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 175 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 130 FinishTimeMs: 1761323267933 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 102 BuildCpuTimeUs: 28 HostName: "ghrun-aakgzge3ui" NodeId: 2 StartTimeMs: 1761323267933 CreateTimeMs: 1761323267831 UpdateTimeMs: 1761323267933 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:48.033439Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1044:2829] 2025-10-24T16:27:48.033454Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1046:2831], 2025-10-24T16:27:48.033460Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1046:2831], 2025-10-24T16:27:48.033541Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1045:2830], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 87 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 51 FinishTimeMs: 1761323267933 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 31 BuildCpuTimeUs: 20 HostName: "ghrun-aakgzge3ui" NodeId: 2 CreateTimeMs: 1761323267831 UpdateTimeMs: 1761323267933 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:48.033549Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1045:2830] 2025-10-24T16:27:48.033554Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. 
Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1046:2831], 2025-10-24T16:27:48.033558Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1046:2831], 2025-10-24T16:27:48.033703Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2831], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 102 DurationUs: 100000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 50 FinishTimeMs: 1761323268033 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 12 HostName: "ghrun-aakgzge3ui" NodeId: 2 StartTimeMs: 1761323267933 CreateTimeMs: 1761323267831 UpdateTimeMs: 1761323268033 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:48.033712Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1046:2831] 2025-10-24T16:27:48.033781Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:48.033789Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k8bgrdmjd5wsqchcsrk28bjg, Database: , SessionId: ydb://session/3?node_id=2&id=ODRlMTA3Ni05Y2IxN2RiNC1lOWU2YWM3YS1iOWRlMjVlNA==, PoolId: default, DatabaseId: /Root}. 
Resource usage for last stat interval: ComputeTime: 0.001183s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } |60.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |60.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> DataShardOutOfOrder::UncommittedReads >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-10-24T16:27:47.081973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:47.105406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:47.105488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:47.105501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001893/r3tmp/tmpPcYyW8/pdisk_1.dat 2025-10-24T16:27:47.165646Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:47.166260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:47.166293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:47.166378Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323266773115 != 1761323266773118 2025-10-24T16:27:47.197286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:47.243679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:47.286435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:47.286458Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found 2025-10-24T16:27:47.358448Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-10-24T16:27:47.358469Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-10-24T16:27:47.358485Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:651:2546] 2025-10-24T16:27:47.373573Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:651:2546] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-10-24T16:27:47.373610Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:651:2546] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-10-24T16:27:47.373843Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [1:651:2546] 
txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-10-24T16:27:47.373867Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:651:2546] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-10-24T16:27:47.373930Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:651:2546] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-10-24T16:27:47.373982Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:651:2546] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-10-24T16:27:47.373997Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:651:2546] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-10-24T16:27:47.374491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:47.374631Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:651:2546] txid# 281474976715657 HANDLE EvClientConnected 2025-10-24T16:27:47.374753Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:651:2546] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-10-24T16:27:47.374764Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:651:2546] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-10-24T16:27:47.389193Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:667:2561], Recipient [1:676:2567]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:47.389451Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:667:2561], Recipient [1:676:2567]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:47.389518Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2567] 2025-10-24T16:27:47.389567Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:47.398726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:667:2561], Recipient [1:676:2567]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:47.398897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:47.398922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:47.399090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:47.399100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:47.399107Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:47.399173Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:47.399193Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:47.399205Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:691:2567] in generation 1 2025-10-24T16:27:47.409449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:47.416851Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:47.416924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:47.416943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:693:2577] 2025-10-24T16:27:47.416947Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:47.416951Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:47.416955Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:47.417009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:676:2567], Recipient [1:676:2567]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:47.417015Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:47.417090Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:47.417110Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:47.417124Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:47.417129Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:47.417134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:47.417137Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:47.417140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:47.417143Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:47.417147Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:27:47.417219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:678:2568], Recipient [1:676:2567]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:47.417226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:47.417230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], 
serverId# [1:678:2568], sessionId# [0:0:0] 2025-10-24T16:27:47.417237Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:678:2568] 2025-10-24T16:27:47.417241Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:47.417257Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:47.417306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:47.417316Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:47.417329Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_ ... tZGRhNzVkNDQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-10-24T16:27:49.446732Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710662, task: 1. pass away 2025-10-24T16:27:49.446745Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-10-24T16:27:49.447257Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-10-24T16:27:49.447293Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:893:2711];scan_id=2;tx_id=281474976710662;fline=kqp_scan_fetcher_actor.cpp:104;event=TEvTerminateFromCompute;sender=[2:890:2708];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-10-24T16:27:49.447299Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:893:2711];scan_id=2;tx_id=281474976710662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-10-24T16:27:49.447363Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:885:2682] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:890:2708], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 33078 Tasks { TaskId: 1 CpuTimeUs: 32802 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 7 BuildCpuTimeUs: 32795 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-aakgzge3ui" NodeId: 2 CreateTimeMs: 1761323269248 CurrentWaitInputTimeUs: 96729 UpdateTimeMs: 1761323269446 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:49.447378Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710662. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:890:2708] 2025-10-24T16:27:49.447397Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:972: ActorId: [2:885:2682] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. task: 1, does not have the CA id yet or is already complete 2025-10-24T16:27:49.447405Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:967: ActorId: [2:885:2682] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:891:2709], task: 2 2025-10-24T16:27:49.447415Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:967: ActorId: [2:885:2682] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:892:2710], task: 3 2025-10-24T16:27:49.447451Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:885:2682] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:49.447468Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [2:891:2709], TxId: 281474976710662, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgrezsf5my177wztxqktkn. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2025-10-24T16:27:49.447476Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [2:891:2709], TxId: 281474976710662, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgrezsf5my177wztxqktkn. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:885:2682], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-10-24T16:27:49.447491Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710662, task: 2. pass away 2025-10-24T16:27:49.447500Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-10-24T16:27:49.447825Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-10-24T16:27:49.447842Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:156: SelfId: [2:892:2710], TxId: 281474976710662, task: 3. Ctx: { CheckpointId : . TraceId : 01k8bgrezsf5my177wztxqktkn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-10-24T16:27:49.447865Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [2:892:2710], TxId: 281474976710662, task: 3. Ctx: { CheckpointId : . TraceId : 01k8bgrezsf5my177wztxqktkn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:885:2682], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-10-24T16:27:49.447877Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710662, task: 3. pass away 2025-10-24T16:27:49.447885Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-10-24T16:27:49.448417Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-10-24T16:27:49.448459Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-10-24T16:27:49.448489Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, ActorId: [2:859:2682], ActorState: ExecuteState, TraceId: 01k8bgrezsf5my177wztxqktkn, Create QueryResponse for error on request, msg: 2025-10-24T16:27:49.448581Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:67:2114] Handle TEvExecuteKqpTransaction 2025-10-24T16:27:49.448588Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:67:2114] TxId# 281474976710664 ProcessProposeKqpTransaction 2025-10-24T16:27:49.448681Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976710661] shutting down 2025-10-24T16:27:49.448694Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:67:2114] Handle TEvProposeTransaction 2025-10-24T16:27:49.448697Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:67:2114] TxId# 0 ProcessProposeTransaction 2025-10-24T16:27:49.448709Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:67:2114] Cookie# 0 userReqId# "" txid# 0 reqId# [2:929:2742] SnapshotReq marker# P0 2025-10-24T16:27:49.448761Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710664. Resolved key sets: 0 2025-10-24T16:27:49.448803Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:932:2742] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-10-24T16:27:49.448830Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710664. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:49.448836Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710664. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:49.448843Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:928:2682] TxId: 281474976710664. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:49.448855Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:928:2682] TxId: 281474976710664. Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:49.448861Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:928:2682] TxId: 281474976710664. 
Ctx: { TraceId: 01k8bgrezsf5my177wztxqktkn, Database: , SessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:49.448909Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:932:2742] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-10-24T16:27:49.448925Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: Forwarded response to sender actor, requestId: 3, sender: [2:591:2519], selfId: [2:65:2112], source: [2:859:2682] 2025-10-24T16:27:49.448957Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:929:2742] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-10-24T16:27:49.449133Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=2&id=MjU2ZmI5NzgtZTY0YjkzZmYtMWE2NmZlYWUtZGRhNzVkNDQ=, workerId: [2:859:2682], local sessions count: 0 2025-10-24T16:27:49.449151Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553190, Sender [2:929:2742], Recipient [2:675:2567]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976710661 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 41 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: 2025-10-24T16:27:43.443125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.469456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:43.469528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:43.469535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018a4/r3tmp/tmpm4nhU4/pdisk_1.dat 2025-10-24T16:27:43.551824Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:43.552729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.552760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.552871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323263162603 != 1761323263162606 2025-10-24T16:27:43.583803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.630373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:43.673576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:43.746264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:43.759763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:43.759954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:43.760004Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:43.760050Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:43.766014Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:43.766152Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:43.766172Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:43.766304Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:43.766312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:43.766316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:43.766367Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:43.766382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:43.766392Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:43.776633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.779639Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:43.779725Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.779746Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:43.779750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:43.779754Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:43.779758Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:43.779812Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.779817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.779937Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:43.779960Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:43.779975Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:43.779983Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:43.779989Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:43.779994Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:43.779997Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:43.780002Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:43.780006Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:43.780014Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.780018Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.780022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:43.780103Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:43.780107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.780123Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:43.780175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:43.780184Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:43.780198Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:43.780205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.780208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:43.780211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:43.780214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.780287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.780290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:43.780293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:43.780296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:43.780305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:43.780308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:43.809840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:43.809870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:43.809876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.810224Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:27:43.810232Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:27:43.820501Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:43.820528Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.820534Z n ... e 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:253: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Shards nodes resolved, success: 1, failed: 0 2025-10-24T16:27:50.088275Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:276: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Shards on nodes: node 2: [72075186224037888] 2025-10-24T16:27:50.088284Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:50.088292Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-10-24T16:27:50.177920Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:826: TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Collect channels updates for task: 1 at actor [2:1234:2939] 2025-10-24T16:27:50.177939Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:818: TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Sending channels info to compute actor: [2:1234:2939], channels: 1 2025-10-24T16:27:50.177950Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. 
Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-10-24T16:27:50.177959Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1234:2939], 2025-10-24T16:27:50.177966Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1234:2939], 2025-10-24T16:27:50.177970Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2285: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-10-24T16:27:50.178164Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1234:2939], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-10-24T16:27:50.178170Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1234:2939], 2025-10-24T16:27:50.178175Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1234:2939], 2025-10-24T16:27:50.178261Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [2:1236:2939], Recipient [2:1169:2904]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-10-24T16:27:50.178292Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:50.178300Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976710667 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v4001/18446744073709551615 2025-10-24T16:27:50.178306Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2555: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-10-24T16:27:50.178313Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:50.178325Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:50.178329Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:50.178333Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:50.178337Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:50.178347Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-10-24T16:27:50.178353Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:50.178356Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:50.178361Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:27:50.178365Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:50.178378Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-10-24T16:27:50.178406Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[2:1236:2939], 0} after executionsCount# 1 2025-10-24T16:27:50.178413Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[2:1236:2939], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:50.178424Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read 
iterator# {[2:1236:2939], 0} finished in read 2025-10-24T16:27:50.178431Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:50.178433Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:50.178436Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:50.178439Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:50.178444Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:27:50.178447Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:50.178450Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:3] at 72075186224037888 has finished 2025-10-24T16:27:50.178453Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:50.178581Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [2:1236:2939], Recipient [2:1169:2904]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:50.178587Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:27:50.178701Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1234:2939], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 177 Tasks { TaskId: 1 CpuTimeUs: 43 FinishTimeMs: 1761323270178 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 30 HostName: "ghrun-aakgzge3ui" NodeId: 2 StartTimeMs: 1761323270178 CreateTimeMs: 1761323270178 UpdateTimeMs: 1761323270178 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:50.178717Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1234:2939] 2025-10-24T16:27:50.178750Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:50.178756Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1230:2939] TxId: 281474976710670. Ctx: { TraceId: 01k8bgrfxyfznp26798qvt3vte, Database: , SessionId: ydb://session/3?node_id=2&id=ZjFkZmZhZDMtODM0YTM0YjAtN2NmMjUwMzQtNmY2ZDUxMTI=, PoolId: default, DatabaseId: /Root}. 
Resource usage for last stat interval: ComputeTime: 0.000177s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-10-24T16:27:20.080518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:20.117433Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:20.117529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:20.117567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002acc/r3tmp/tmpigmmh5/pdisk_1.dat 2025-10-24T16:27:20.213477Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:20.246604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:20.246646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:20.283716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29857, node 1 2025-10-24T16:27:20.393115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:20.393141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:20.393146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:20.393319Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.396810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.436546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11016 2025-10-24T16:27:20.771349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.549179Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.551330Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.552170Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.560065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.560094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.583408Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.583942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.665975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.666022Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.677941Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.731387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.752904Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753087Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753226Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753338Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753378Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753394Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753428Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753456Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.753472Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.819323Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.825625Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.825659Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.835216Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.835288Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.835311Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.835315Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.835319Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.835324Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.835328Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.835333Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.835434Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.837221Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.837240Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.838755Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:21.838792Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:21.882867Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:21.883889Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.887799Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.887834Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.887846Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.890963Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.892922Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.892954Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:21.907641Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:22.032978Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:22.096998Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:22.202493Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:23.099471Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.170337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2229:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.170394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.170522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2247:3066], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.170531Z node 1 ... X_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:27:34.296020Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:35.254430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:27:35.254474Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:35.310224Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:35.310396Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 3 2025-10-24T16:27:35.310484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2025-10-24T16:27:37.669672Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:27:39.260873Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:39.261097Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 4 2025-10-24T16:27:39.261189Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2025-10-24T16:27:41.516268Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:27:43.079513Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:43.079673Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 5 2025-10-24T16:27:43.079738Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-10-24T16:27:45.584495Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:27:46.578723Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-10-24T16:27:46.578760Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8321: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-10-24T16:27:46.578764Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8352: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-10-24T16:27:46.578767Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-10-24T16:27:47.699757Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:47.699944Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 6 2025-10-24T16:27:47.700011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-10-24T16:27:47.710401Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:47.710430Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 
72075186224037897 2025-10-24T16:27:47.710494Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-10-24T16:27:47.721736Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:48.694717Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:48.694743Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:707: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:48.694747Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:48.694753Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-10-24T16:27:48.694757Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:48.694832Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:3748:3437], ActorId: [2:3749:3438], Starting query actor #1 [2:3750:3439] 2025-10-24T16:27:48.694841Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:3749:3438], ActorId: [2:3750:3439], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:48.695608Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:3749:3438], ActorId: [2:3750:3439], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTBkYWYwZGQtYWRiODI4MTQtYThiMGEzNzktY2MwMzg5ODc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:48.701738Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3759:3448]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:48.701788Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:48.701798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [2:3761:3450] 2025-10-24T16:27:48.701806Z node 2 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [2:3761:3450] 2025-10-24T16:27:48.701907Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:3762:3451] 2025-10-24T16:27:48.701927Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:3762:3451], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:48.701932Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:48.701951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:3761:3450], server id = [2:3762:3451], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:48.701965Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:48.701975Z node 2 :STATISTICS DEBUG: 
service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:3759:3448], StatRequests.size() = 1 2025-10-24T16:27:48.701982Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:48.716098Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3749:3438], ActorId: [2:3750:3439], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTBkYWYwZGQtYWRiODI4MTQtYThiMGEzNzktY2MwMzg5ODc=, TxId: 2025-10-24T16:27:48.716117Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3749:3438], ActorId: [2:3750:3439], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTBkYWYwZGQtYWRiODI4MTQtYThiMGEzNzktY2MwMzg5ODc=, TxId: 2025-10-24T16:27:48.716240Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3748:3437], ActorId: [2:3749:3438], Got response [2:3750:3439] SUCCESS 2025-10-24T16:27:48.716301Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:48.727437Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:48.727456Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:48.789311Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:48.789342Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:48.861425Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3761:3450], schemeshard count = 1 2025-10-24T16:27:49.646530Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:49.646560Z node 2 :STATISTICS ERROR: aggregator_impl.cpp:835: [72075186224037894] IsColumnTable. traversal path [OwnerId: 72075186224037897, LocalPathId: 4] is not known to schemeshard 2025-10-24T16:27:49.646621Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:3815:3478], ActorId: [2:3816:3479], Starting query actor #1 [2:3817:3480] 2025-10-24T16:27:49.646631Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:3816:3479], ActorId: [2:3817:3480], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:49.647213Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:3816:3479], ActorId: [2:3817:3480], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjIwOGEyMjItYzNlODQwODMtMmVmYzA2MTktYTZmNWU1NDM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:49.648976Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3816:3479], ActorId: [2:3817:3480], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjIwOGEyMjItYzNlODQwODMtMmVmYzA2MTktYTZmNWU1NDM=, TxId: 2025-10-24T16:27:49.648993Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3816:3479], ActorId: [2:3817:3480], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjIwOGEyMjItYzNlODQwODMtMmVmYzA2MTktYTZmNWU1NDM=, TxId: 2025-10-24T16:27:49.649039Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3815:3478], ActorId: [2:3816:3479], Got response [2:3817:3480] SUCCESS 2025-10-24T16:27:49.649084Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:49.670582Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:49.670601Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2833:3262] 2025-10-24T16:27:49.670705Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3840:3494]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:49.671128Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:49.671133Z node 2 :STATISTICS ERROR: service_impl.cpp:797: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-10-24T16:27:49.671136Z node 2 :STATISTICS DEBUG: service_impl.cpp:1306: ReplyFailed(), request id = 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-10-24T16:27:21.320410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.347873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.350062Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.350173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.350189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002aa8/r3tmp/tmpBS02a2/pdisk_1.dat 2025-10-24T16:27:21.423454Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.456148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.456178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.477464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64054, node 1 2025-10-24T16:27:21.540295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.540315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.540320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.540409Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.543082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.567764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18173 2025-10-24T16:27:21.896779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.686532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.688207Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.689229Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.695415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.695444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.732522Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.733674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.836126Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.836178Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.836621Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.836784Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.836963Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.837154Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.837180Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.837253Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.837287Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.837320Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.837352Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.850100Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.906135Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.916401Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.916433Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.921958Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.922176Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.922206Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.922212Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.922218Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.922226Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.922235Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:22.922247Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.922369Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.923080Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1613:2453] 2025-10-24T16:27:22.924554Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:22.926672Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.926688Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.926700Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.929630Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.929654Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1845:2593], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.939392Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1863:2606] 2025-10-24T16:27:22.939497Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1863:2606], schemeshard id = 72075186224037897 2025-10-24T16:27:22.956947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.958501Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.958547Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.008559Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.074335Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.085468Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.207215Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.315882Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.032424Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.088404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2214:3052], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.088444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.088549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2232:3057], DatabaseId: /Root, PoolId: default, Failed to fetch ... 186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:48.044362Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:48.044370Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:48.044375Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:48.044568Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:48.055641Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:27:48.055678Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:48.055786Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:48.055797Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:48.055993Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:48.056011Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:48.056053Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:48.067066Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-10-24T16:27:48.067087Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:48.067114Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:48.067245Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4050:3764], server id = [2:4051:3765], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:48.067268Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4050:3764], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:48.067979Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:48.067997Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:48.068044Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:48.068071Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:48.068102Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4050:3764], server id = [2:4051:3765], tablet id = 72075186224037899 2025-10-24T16:27:48.068106Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:48.068147Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4055:3768], ActorId: [2:4056:3769], Starting query actor #1 [2:4057:3770] 2025-10-24T16:27:48.068158Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4056:3769], ActorId: [2:4057:3770], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:48.068917Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4056:3769], ActorId: [2:4057:3770], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YTFjMGE3MjEtN2E5M2NhZjktMTNkYjVjZDItYmNhZjgzODk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:48.074212Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4066:3779]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:48.074277Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:48.074282Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4066:3779], StatRequests.size() = 1 2025-10-24T16:27:48.097056Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4056:3769], ActorId: [2:4057:3770], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTFjMGE3MjEtN2E5M2NhZjktMTNkYjVjZDItYmNhZjgzODk=, TxId: 2025-10-24T16:27:48.097079Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4056:3769], ActorId: [2:4057:3770], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTFjMGE3MjEtN2E5M2NhZjktMTNkYjVjZDItYmNhZjgzODk=, TxId: 2025-10-24T16:27:48.097145Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4055:3768], ActorId: [2:4056:3769], Got response [2:4057:3770] SUCCESS 2025-10-24T16:27:48.097291Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:48.129138Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:48.129160Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:48.555393Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:48.555419Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:49.014196Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:49.014226Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:49.014232Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:49.955083Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:49.955132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:27:49.955137Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:49.955309Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:49.966576Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:49.966689Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:49.966703Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:49.966806Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:49.988339Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:49.988388Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:49.988509Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4152:3819], server id = [2:4153:3820], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:49.988534Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4152:3819], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:49.988773Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:49.988781Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:49.988821Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:49.988838Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:49.988861Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4152:3819], server id = [2:4153:3820], tablet id = 72075186224037899 2025-10-24T16:27:49.988864Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:49.988894Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4155:3822], ActorId: [2:4156:3823], Starting query actor #1 [2:4157:3824] 2025-10-24T16:27:49.988902Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4156:3823], ActorId: [2:4157:3824], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:49.989489Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4156:3823], ActorId: [2:4157:3824], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDU3ZWI3OGEtNDhkYWZlYTYtYzMzNWU3NTctYmM5M2E0Y2E=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:50.012918Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4156:3823], ActorId: [2:4157:3824], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDU3ZWI3OGEtNDhkYWZlYTYtYzMzNWU3NTctYmM5M2E0Y2E=, TxId: 2025-10-24T16:27:50.012942Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4156:3823], ActorId: [2:4157:3824], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDU3ZWI3OGEtNDhkYWZlYTYtYzMzNWU3NTctYmM5M2E0Y2E=, TxId: 2025-10-24T16:27:50.013016Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4155:3822], ActorId: [2:4156:3823], Got response [2:4157:3824] SUCCESS 2025-10-24T16:27:50.013079Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:50.024446Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:50.024472Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3082:3321] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2025-10-24T16:27:47.825728Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:47.825912Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:47.847950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:47.850001Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:305:2228], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:47.850087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:27:47.850094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:47.850353Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:689:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:47.850382Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:47.850398Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001892/r3tmp/tmpikgH7u/pdisk_1.dat 2025-10-24T16:27:47.924526Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:47.959840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:47.959893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:47.960087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:47.960100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:47.991993Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:47.992161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:47.992241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:48.044030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:48.067939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:48.078659Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:48.338542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:48.366291Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [2:1281:2376], Recipient [2:1307:2389]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:48.367971Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [2:1281:2376], Recipient [2:1307:2389]: NKikimr::TEvTablet::TEvRestored 
2025-10-24T16:27:48.368049Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1307:2389] 2025-10-24T16:27:48.368090Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:48.368773Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [2:1281:2376], Recipient [2:1307:2389]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:48.375501Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:48.375549Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:48.375674Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:48.375684Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:48.375691Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:48.375751Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:48.375786Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:48.375796Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1331:2389] in generation 1 2025-10-24T16:27:48.376421Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:48.379176Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:48.379222Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:48.379238Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1335:2406] 2025-10-24T16:27:48.379241Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:48.379245Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:48.379248Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:48.379294Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:1307:2389], Recipient [2:1307:2389]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:48.379298Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:48.379352Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:48.379365Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:48.379376Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:48.379381Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:48.379386Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:48.379389Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:48.379393Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:48.379397Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:48.379400Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:27:48.379426Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [2:1304:2387], Recipient [2:1307:2389]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:48.379429Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:48.379432Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1290:2774], serverId# [2:1304:2387], sessionId# [0:0:0] 2025-10-24T16:27:48.379497Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:823:2464], Recipient [2:1304:2387] 2025-10-24T16:27:48.379500Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:48.379511Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:48.379555Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:48.379562Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:48.379575Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-10-24T16:27:48.379579Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:48.379582Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:48.379585Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:48.379588Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:48.379623Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:48.379626Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:48.379628Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710657] at 72075186224037888 to e ... ue: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-10-24T16:27:50.139037Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710665. 
Ctx: { TraceId: 01k8bgrfz98v9548vd6dmgvv25, Database: , SessionId: ydb://session/3?node_id=1&id=ZDZiODgwNDMtMjg0YjhjOGMtNGYxY2EyZWMtMjRiZjg4MTU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:50.139655Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [2:1745:2447], Recipient [2:1307:2389]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-10-24T16:27:50.139747Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:50.139769Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:50.139790Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.139795Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:50.139799Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:50.139802Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:50.139816Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-10-24T16:27:50.139820Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.139822Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:50.139826Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:27:50.139828Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:50.139844Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-10-24T16:27:50.139943Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 281474976710662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:27:50.139955Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:27:50.139963Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[2:1745:2447], 0} after executionsCount# 1 2025-10-24T16:27:50.139970Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 
72075186224037888 read iterator# {[2:1745:2447], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:50.139990Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[2:1745:2447], 0} finished in read 2025-10-24T16:27:50.140002Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.140008Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:50.140012Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:50.140020Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:50.140033Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.140037Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:50.140041Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:7] at 72075186224037888 has finished 2025-10-24T16:27:50.140047Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:50.140074Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:27:50.140363Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [2:1745:2447], Recipient [2:1307:2389]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:50.140373Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-10-24T16:27:50.168768Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710666. Ctx: { TraceId: 01k8bgrfzz6m5vy2fvfpxcw89t, Database: , SessionId: ydb://session/3?node_id=1&id=ZDE4NzJlZi1jYzQ3NzVmYS00OTBmMGUxNi0zNDZmZTMyMQ==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:27:50.169462Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [2:1769:2448], Recipient [2:1307:2389]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-10-24T16:27:50.169522Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:50.169543Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:50.169562Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:8] at 72075186224037888 is Executed 2025-10-24T16:27:50.169567Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:50.169572Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:50.169575Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:50.169588Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 72075186224037888 2025-10-24T16:27:50.169593Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:8] at 72075186224037888 is Executed 2025-10-24T16:27:50.169595Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:50.169598Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:27:50.169601Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:50.169616Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-10-24T16:27:50.169692Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 281474976710666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:27:50.169698Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-10-24T16:27:50.169703Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[2:1769:2448], 0} after executionsCount# 1 2025-10-24T16:27:50.169710Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[2:1769:2448], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:50.169727Z 
node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[2:1769:2448], 0} finished in read 2025-10-24T16:27:50.169742Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:8] at 72075186224037888 is Executed 2025-10-24T16:27:50.169749Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:50.169753Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:50.169756Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:50.169766Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:8] at 72075186224037888 is Executed 2025-10-24T16:27:50.169768Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:50.169771Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:8] at 72075186224037888 has finished 2025-10-24T16:27:50.169775Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:50.169791Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:27:50.170017Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [2:1769:2448], Recipient [2:1307:2389]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:50.170025Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:27:50.170231Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [2:261:2140], Recipient [2:1307:2389]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |60.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.7%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> DataShardOutOfOrder::UncommittedReads [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-10-24T16:27:45.745972Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:45.763164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:45.763186Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:45.763722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:45.763776Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:45.763826Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:45.764969Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:45.776558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:45.776612Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:45.776767Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:45.776776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:45.776784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:45.776862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:45.776872Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:45.776885Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:45.815752Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:45.820762Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:45.820842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:45.820864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:45.820870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:45.820876Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:45.820882Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:45.820943Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient 
[1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.820951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.821027Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:45.821054Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:45.821061Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:45.821067Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:45.821073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:45.821078Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:45.821084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:45.821090Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:45.821095Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:45.821105Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.821110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.821116Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:45.821681Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:45.821694Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:45.821708Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:45.821749Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:45.821759Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:45.821769Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:45.821775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:45.821780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:45.821786Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:45.821791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:45.821855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:45.821859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:45.821864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:45.821868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:45.821878Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:45.821881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:45.821886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:45.821889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:45.821895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:45.832572Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:45.832596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:45.832604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:45.832614Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:45.832631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:45.832721Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.832729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:45.832737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:45.832760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:45.832765Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:45.832795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:45.832802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 
2025-10-24T16:27:45.832807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:45.832812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:45.835772Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:45.835798Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:45.835876Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.835884Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:45.835895Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:45.835903Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:45.835909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:45.835919Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:45.835924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... tion status for [1000005:154] at 9437184 is DelayComplete 2025-10-24T16:27:50.074443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:154] at 9437184 executing on unit CompleteOperation 2025-10-24T16:27:50.074445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:154] at 9437184 to execution unit CompletedOperations 2025-10-24T16:27:50.074448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:154] at 9437184 on unit CompletedOperations 2025-10-24T16:27:50.074451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:154] at 9437184 is Executed 2025-10-24T16:27:50.074453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:154] at 9437184 executing on unit CompletedOperations 2025-10-24T16:27:50.074456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000005:154] at 9437184 has finished 2025-10-24T16:27:50.074458Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:50.074461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:50.074463Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:50.074466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:50.086251Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-10-24T16:27:50.086268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution 
for [1000005:149] at 9437186 on unit CompleteOperation 2025-10-24T16:27:50.086283Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:105:2138], exec latency: 2 ms, propose latency: 4 ms 2025-10-24T16:27:50.086294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-10-24T16:27:50.086300Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-10-24T16:27:50.086335Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-10-24T16:27:50.086340Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-10-24T16:27:50.086343Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-10-24T16:27:50.086348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:105:2138], exec latency: 2 ms, propose latency: 4 ms 2025-10-24T16:27:50.086352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-10-24T16:27:50.086355Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-10-24T16:27:50.086409Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-10-24T16:27:50.086416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:50.086420Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-10-24T16:27:50.086431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-10-24T16:27:50.086434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:50.086436Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-10-24T16:27:50.086448Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:50.086451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-10-24T16:27:50.086459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 3 ms, propose latency: 5 ms 2025-10-24T16:27:50.086463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 
2025-10-24T16:27:50.086466Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:50.086489Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-10-24T16:27:50.086492Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-10-24T16:27:50.086496Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:50.086498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2025-10-24T16:27:50.086504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:27:50.086507Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:50.086524Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:50.086527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-10-24T16:27:50.086532Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 3 ms, propose latency: 5 ms 2025-10-24T16:27:50.086536Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-10-24T16:27:50.086539Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:50.086559Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:50.086562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-10-24T16:27:50.086578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 3 ms, propose latency: 5 ms 2025-10-24T16:27:50.086582Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-10-24T16:27:50.086585Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:50.086603Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:50.086606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-10-24T16:27:50.086611Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:27:50.086614Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:50.086629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 
2025-10-24T16:27:50.086632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-10-24T16:27:50.086636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 2 ms, propose latency: 4 ms 2025-10-24T16:27:50.086640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-10-24T16:27:50.086643Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:50.086668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-10-24T16:27:50.086671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:50.086674Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-10-24T16:27:50.086688Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-10-24T16:27:50.086691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:50.086693Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-10-24T16:27:50.086701Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-10-24T16:27:50.086703Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:50.086706Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-10-24T16:27:50.086713Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-10-24T16:27:50.086718Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:50.086720Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> AnalyzeColumnshard::Analyze [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2025-10-24T16:27:49.811069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:49.833706Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:49.833765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:49.833772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00188a/r3tmp/tmpdBObhF/pdisk_1.dat 2025-10-24T16:27:49.891680Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:49.892339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:49.892363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:49.892443Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323269550628 != 1761323269550631 2025-10-24T16:27:49.923138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:49.968444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:50.010897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:50.083228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:50.096663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:50.096824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:50.096871Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:50.096911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:50.102589Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:50.102683Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:50.102699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:50.102798Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:50.102804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:50.102808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:50.102852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:50.102876Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:50.102884Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:50.102923Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:50.105706Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:50.105751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:50.105765Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:50.105768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:50.105772Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:50.105775Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:50.105809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:50.105813Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:50.105888Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:50.105900Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:50.105906Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:50.105910Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:50.105915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:50.105918Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:50.105921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:50.105924Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:50.105927Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:50.105990Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:50.105996Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:50.106000Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:50.106012Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:50.106015Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:50.106028Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:50.106071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:50.106079Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:50.106090Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:50.106095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:50.106098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:50.106101Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:50.106104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:50.106163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:50.106166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:50.106169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:50.106171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:50.106178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:50.106181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:50.106183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:50.106185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:50.106189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:50.106271Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:50.106274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:50.106277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:50.106283Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-10-24T16:27:50.106290Z node 1 :TX_DATASHAR ... 16:27:50.699543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-10-24T16:27:50.699551Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:27:50.699554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-10-24T16:27:50.699557Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:50.699560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:50.699567Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-10-24T16:27:50.699577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-10-24T16:27:50.699581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:27:50.699583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:50.699586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2025-10-24T16:27:50.699588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-10-24T16:27:50.699591Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:27:50.699593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-10-24T16:27:50.699658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-10-24T16:27:50.699662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-10-24T16:27:50.699666Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:6] 
at 72075186224037888 2025-10-24T16:27:50.699671Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-10-24T16:27:50.699698Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:6] at 72075186224037888, row count=1 2025-10-24T16:27:50.699708Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:47: add locks to result: 0 2025-10-24T16:27:50.699718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:50.699721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-10-24T16:27:50.699723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-10-24T16:27:50.699726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:27:50.699734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-10-24T16:27:50.699736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-10-24T16:27:50.699739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:50.699741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:50.699750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:27:50.699752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:50.699755Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:6] at 72075186224037888 has finished ... blocked commit for tablet 72075186224037888 2025-10-24T16:27:50.713149Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgrghb8zwsm7f3y4rcftva, Database: , SessionId: ydb://session/3?node_id=1&id=YTUwOGRkNTUtZWNmMzg0ZmMtMmM2ZTQyOGQtYjliOWUzMmI=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:27:50.713523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [1:979:2761], Recipient [1:675:2566]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-10-24T16:27:50.713562Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:50.713571Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-10-24T16:27:50.713578Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2555: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-10-24T16:27:50.713586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:50.713601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.713605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:50.713610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:50.713613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:50.713622Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-10-24T16:27:50.713626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.713629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:50.713632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:27:50.713634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:50.713645Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-10-24T16:27:50.713681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-10-24T16:27:50.713684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:50.713687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:50.713689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit 
CompletedOperations 2025-10-24T16:27:50.713696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:27:50.713699Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:50.713705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:7] at 72075186224037888 has finished 2025-10-24T16:27:50.713708Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:50.784872Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [1:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2025-10-24T16:27:50.784908Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [1:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2025-10-24T16:27:50.917323Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-10-24T16:27:50.917343Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:27:50.917352Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2025-10-24T16:27:50.917380Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:50.917405Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:27:50.917408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:50.917416Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[1:979:2761], 0} after executionsCount# 1 2025-10-24T16:27:50.917424Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[1:979:2761], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:50.917445Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[1:979:2761], 0} finished in read 2025-10-24T16:27:50.918102Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [1:979:2761], Recipient [1:675:2566]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:50.918116Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-10-24T16:27:19.944566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.982949Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.983028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.983063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ad1/r3tmp/tmp7dV07q/pdisk_1.dat 2025-10-24T16:27:20.082913Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:20.115388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:20.115424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:20.151198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8897, node 1 2025-10-24T16:27:20.233946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:20.233972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:20.233977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:20.234170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.238145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.272780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7384 2025-10-24T16:27:20.617438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.442829Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.444907Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.445597Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.452082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.452110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.475390Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.476061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.557788Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.557823Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.569660Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.633179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.654842Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655069Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655253Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655367Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655383Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655403Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655479Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.655499Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.723532Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.729278Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.729316Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.735031Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.735387Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.735408Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.735412Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.735417Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.735421Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.735426Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.735431Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.735656Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.735869Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:21.736168Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.737762Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.737773Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.737785Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.741460Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:21.742061Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:21.743307Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.743341Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.744826Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:21.744870Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:21.813361Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.835842Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.939318Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.940994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.942235Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.942265Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:22.034379Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:22.242459Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:22.768993Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.818293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2224:3058], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.818333Z no ... CS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4925:4479], ActorId: [2:4926:4480], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmY1ODA2ZTgtMjUxZjg0ZjItNTAyYmZjNWEtOTEwZjYyNTI=, TxId: 2025-10-24T16:27:48.520857Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4924:4478], ActorId: [2:4925:4479], Got response [2:4926:4480] SUCCESS 2025-10-24T16:27:48.520945Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:48.532262Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:48.532281Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:48.604453Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:48.604480Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:48.655810Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:4937:4491], schemeshard count = 1 2025-10-24T16:27:50.475099Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:50.475124Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-10-24T16:27:50.475132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:50.475136Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:50.476053Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:50.487888Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:50.488066Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:50.488083Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:50.488374Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:50.509871Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:50.509929Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:50.510093Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5053:4548], server id = [2:5057:4552], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:50.510338Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5053:4548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:50.510553Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5054:4549], server id = [2:5058:4553], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:50.510560Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5054:4549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:50.510579Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5055:4550], server id = [2:5059:4554], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:50.510583Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5055:4550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:50.510637Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5056:4551], server id = [2:5060:4555], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:50.510641Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5056:4551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:50.511503Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:50.511588Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5053:4548], server id = [2:5057:4552], tablet id = 72075186224037899 2025-10-24T16:27:50.511591Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:50.511877Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:50.511973Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5054:4549], server id = [2:5058:4553], tablet id = 72075186224037900 2025-10-24T16:27:50.511977Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:50.512013Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:50.512046Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:50.512050Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:50.512084Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5055:4550], server id = [2:5059:4554], tablet id = 72075186224037901 2025-10-24T16:27:50.512086Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:50.512107Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-10-24T16:27:50.512123Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:50.512160Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5056:4551], server id = [2:5060:4555], tablet id = 72075186224037902 2025-10-24T16:27:50.512162Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:50.512268Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:50.554265Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:50.554322Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:50.554461Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5075:4566], server id = [2:5076:4567], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:50.554488Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5075:4566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:50.554811Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:50.554820Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:50.554856Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:50.554879Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:50.554930Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5078:4569], ActorId: [2:5079:4570], Starting query actor #1 [2:5080:4571] 2025-10-24T16:27:50.554939Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5079:4570], ActorId: [2:5080:4571], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:50.555392Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5075:4566], server id = [2:5076:4567], tablet id = 72075186224037900 2025-10-24T16:27:50.555399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:50.555522Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5079:4570], ActorId: [2:5080:4571], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NzgxMzAyZDctZWUwMjg1YWYtOWYxNDZmMDYtZWY4NDQxNGI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:50.560714Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5089:4580]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:50.560785Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:50.560791Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5089:4580], StatRequests.size() = 1 2025-10-24T16:27:50.584848Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5079:4570], ActorId: [2:5080:4571], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzgxMzAyZDctZWUwMjg1YWYtOWYxNDZmMDYtZWY4NDQxNGI=, TxId: 2025-10-24T16:27:50.584870Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5079:4570], ActorId: [2:5080:4571], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzgxMzAyZDctZWUwMjg1YWYtOWYxNDZmMDYtZWY4NDQxNGI=, TxId: 2025-10-24T16:27:50.584940Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5078:4569], ActorId: [2:5079:4570], Got response [2:5080:4571] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:50.585017Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5103:4586]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:50.585080Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:50.585180Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:50.585185Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:50.585323Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:50.585330Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:50.585336Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:50.586222Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 >> AnalyzeColumnshard::AnalyzeStatus [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2025-10-24T16:27:21.571735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.605614Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.605673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.605698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a94/r3tmp/tmprZZXQX/pdisk_1.dat 2025-10-24T16:27:21.701456Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.734169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.734207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.766568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23641, node 1 2025-10-24T16:27:21.847305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.847320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.847324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.847436Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.849731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.884279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64515 2025-10-24T16:27:22.207482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.109965Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.124205Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.125108Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.137142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.137183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.168938Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.173050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.277893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.277944Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.292388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.356818Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.378618Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.389682Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.389881Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390037Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390154Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390197Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390215Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390237Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390255Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.390289Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.468255Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.482414Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.482450Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.492761Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.492799Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1865:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.493040Z node 2 :STATISTICS DEBUG: 
tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.493120Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.493149Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.493154Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.493160Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.493166Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.493172Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.493180Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.493387Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.495210Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1876:2594] 2025-10-24T16:27:23.495281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1876:2594], schemeshard id = 72075186224037897 2025-10-24T16:27:23.547020Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1920:2612] 2025-10-24T16:27:23.557115Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.561280Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.561302Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.561313Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.563881Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.565811Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.565844Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.715479Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.784956Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.922757Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.786635Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.827945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2225:3056], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.827980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.828078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.828086Z node 1 ... 186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:49.369296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:49.369303Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:49.369307Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:49.369467Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:49.380369Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:27:49.380390Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:49.380490Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:49.380499Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:49.380635Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:49.380651Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:49.380741Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:49.412322Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-10-24T16:27:49.412349Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:49.412379Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:49.412530Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4073:3776], server id = [2:4074:3777], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:49.412551Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4073:3776], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:49.413276Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:49.413290Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:49.413329Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:49.413349Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:49.413399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4073:3776], server id = [2:4074:3777], tablet id = 72075186224037899 2025-10-24T16:27:49.413402Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:49.413434Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4078:3780], ActorId: [2:4079:3781], Starting query actor #1 [2:4080:3782] 2025-10-24T16:27:49.413441Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4079:3781], ActorId: [2:4080:3782], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:49.413939Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4079:3781], ActorId: [2:4080:3782], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTExMzhkOWItZDhkNTQzZGQtZDZjYTA4M2UtODA2Zjg5ZTc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:49.419506Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4089:3791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:49.419563Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:49.419569Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4089:3791], StatRequests.size() = 1 2025-10-24T16:27:49.462699Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4079:3781], ActorId: [2:4080:3782], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTExMzhkOWItZDhkNTQzZGQtZDZjYTA4M2UtODA2Zjg5ZTc=, TxId: 2025-10-24T16:27:49.462719Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4079:3781], ActorId: [2:4080:3782], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTExMzhkOWItZDhkNTQzZGQtZDZjYTA4M2UtODA2Zjg5ZTc=, TxId: 2025-10-24T16:27:49.462781Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4078:3780], ActorId: [2:4079:3781], Got response [2:4080:3782] SUCCESS 2025-10-24T16:27:49.462912Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:49.474168Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:49.474186Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:49.752137Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:49.752162Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:50.107573Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:50.107595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:50.107599Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:50.816360Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:50.816422Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:27:50.816430Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:50.816666Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:50.827806Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:50.827929Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:50.827941Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:50.828040Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:50.849298Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:50.849358Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:50.849545Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4177:3831], server id = [2:4178:3832], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:50.849569Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4177:3831], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:50.849828Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:50.849837Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:50.849882Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:50.849900Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:50.849926Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4177:3831], server id = [2:4178:3832], tablet id = 72075186224037899 2025-10-24T16:27:50.849929Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:50.849975Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4180:3834], ActorId: [2:4181:3835], Starting query actor #1 [2:4182:3836] 2025-10-24T16:27:50.849983Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4181:3835], ActorId: [2:4182:3836], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:50.850549Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4181:3835], ActorId: [2:4182:3836], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjUwZmY4NGUtNDM5ZGViMmUtNjZkNGRhN2ItMzg0N2I0MzA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:50.885571Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4181:3835], ActorId: [2:4182:3836], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjUwZmY4NGUtNDM5ZGViMmUtNjZkNGRhN2ItMzg0N2I0MzA=, TxId: 2025-10-24T16:27:50.885596Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4181:3835], ActorId: [2:4182:3836], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjUwZmY4NGUtNDM5ZGViMmUtNjZkNGRhN2ItMzg0N2I0MzA=, TxId: 2025-10-24T16:27:50.885683Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4180:3834], ActorId: [2:4181:3835], Got response [2:4182:3836] SUCCESS 2025-10-24T16:27:50.885753Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:50.896983Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:50.897015Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3107:3329] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> KqpRm::Reduce >> KqpRm::ManyTasks >> KqpRm::NodesMembershipByExchanger >> KqpRm::SingleSnapshotByExchanger ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-10-24T16:27:21.540083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.566664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.566744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.566777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a9b/r3tmp/tmptXq0dp/pdisk_1.dat 2025-10-24T16:27:21.657324Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.692120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.692169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.724571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15012, node 1 2025-10-24T16:27:21.805703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.805724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.805729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.805906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.809007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.854322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18996 2025-10-24T16:27:22.163989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.027232Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.041240Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.042549Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.068654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.068695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.092838Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.093501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.184102Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.184152Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.197771Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.256492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.282917Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283137Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283342Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283426Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283504Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283533Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283552Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283587Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.283627Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.367579Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.374325Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.374352Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.382484Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.382578Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.382603Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.382607Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.382612Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.382617Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.382620Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.382626Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.382734Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.386847Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.386875Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1851:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.389315Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1874:2603] 2025-10-24T16:27:23.389394Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1874:2603], schemeshard id = 72075186224037897 2025-10-24T16:27:23.434703Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1903:2611] 2025-10-24T16:27:23.436172Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.440982Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.441006Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.441021Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.445112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.447526Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.447569Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.463291Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.581260Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.640317Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.756284Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.626519Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.737389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2226:3059], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.737432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.737547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2244:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.737557Z node 1 ... 37894] EvServerConnected, pipe server id = [2:3983:3723] 2025-10-24T16:27:49.431609Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:3983:3723], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:49.431615Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:49.431635Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:3982:3722], server id = [2:3983:3723], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:49.431660Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:49.431672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:3980:3720], StatRequests.size() = 1 2025-10-24T16:27:49.431695Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:49.447627Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3970:3710], ActorId: [2:3971:3711], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTYxNjcyNDItYWRlYzVkNzAtYjBmNjk3NC0yYmNiMzg4Yg==, TxId: 2025-10-24T16:27:49.447665Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3970:3710], ActorId: [2:3971:3711], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTYxNjcyNDItYWRlYzVkNzAtYjBmNjk3NC0yYmNiMzg4Yg==, TxId: 2025-10-24T16:27:49.447755Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3969:3709], ActorId: [2:3970:3710], Got response [2:3971:3711] SUCCESS 2025-10-24T16:27:49.447822Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:49.458911Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:49.458928Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:49.531358Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:49.531385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:49.583253Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3982:3722], schemeshard count = 1 2025-10-24T16:27:50.236790Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:50.236821Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:27:50.237580Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:50.259807Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:50.259968Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:50.259978Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:51: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-10-24T16:27:50.291973Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:50.292009Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR cookie 0 ... waiting for TEvAnalyzeTableResponse (done) 2025-10-24T16:27:50.292552Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4064:3765] 2025-10-24T16:27:50.292705Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:500: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-10-24T16:27:50.293072Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4066:3766]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
CurPropagationSeq: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 4], [OwnerId: 72075186224037897, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:06Z
NavigateType: Analyze
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 1970-01-01T00:00:00Z
TraversalDatabase: 
TraversalPathId: 
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-10-24T16:27:50.293477Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:50.293505Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:50.315272Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-10-24T16:27:51.064128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:51.064174Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:51.064180Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:51.064331Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:51.075443Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:51.075538Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:51.075550Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:51.075788Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:51.097247Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:51.097312Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:51.097457Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4112:3791], server id = [2:4113:3792], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:51.097479Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4112:3791], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:51.098103Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:51.098115Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:51.098148Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:51.098182Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:51.098209Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4112:3791], server id = [2:4113:3792], tablet id = 72075186224037899 2025-10-24T16:27:51.098212Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:51.098239Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4117:3795], ActorId: [2:4118:3796], Starting query actor #1 [2:4119:3797] 2025-10-24T16:27:51.098248Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4118:3796], ActorId: [2:4119:3797], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:51.098776Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4118:3796], ActorId: [2:4119:3797], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OTkzYmNlMzUtMjdjMTkzMWYtNDNiNmI1MjAtZjZhZmQxZDk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:51.103219Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4128:3806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:51.103281Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:51.103288Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4128:3806], StatRequests.size() = 1 2025-10-24T16:27:51.138505Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4118:3796], ActorId: [2:4119:3797], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTkzYmNlMzUtMjdjMTkzMWYtNDNiNmI1MjAtZjZhZmQxZDk=, TxId: 2025-10-24T16:27:51.138528Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4118:3796], ActorId: [2:4119:3797], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTkzYmNlMzUtMjdjMTkzMWYtNDNiNmI1MjAtZjZhZmQxZDk=, TxId: 2025-10-24T16:27:51.138607Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4117:3795], ActorId: [2:4118:3796], Got response [2:4119:3797] SUCCESS 2025-10-24T16:27:51.138675Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:51.160385Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:51.160405Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:968:2752] 2025-10-24T16:27:51.160719Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4152:3814] 2025-10-24T16:27:51.160832Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:500: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] >> KqpRm::DisonnectNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-10-24T16:27:48.772739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:48.797765Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:48.797837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:48.797845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00188e/r3tmp/tmpBvFSeG/pdisk_1.dat 2025-10-24T16:27:48.861108Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:48.861783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:48.861811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:48.861896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323268492300 != 1761323268492303 2025-10-24T16:27:48.892711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:48.938222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:48.981103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:49.052987Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-10-24T16:27:49.053004Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-10-24T16:27:49.053017Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-10-24T16:27:49.063346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-10-24T16:27:49.063372Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-10-24T16:27:49.063522Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-10-24T16:27:49.063537Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-10-24T16:27:49.063581Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-10-24T16:27:49.063610Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-10-24T16:27:49.063619Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-10-24T16:27:49.063963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:49.064052Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-10-24T16:27:49.064141Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-10-24T16:27:49.064148Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-10-24T16:27:49.077648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:680:2569]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:49.077883Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:680:2569]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:49.077936Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:680:2569] 2025-10-24T16:27:49.077990Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:49.078918Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:667:2561], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:49.084633Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:680:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:49.084684Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:667:2561], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:49.084734Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:683:2571] 2025-10-24T16:27:49.084753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:49.085480Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:667:2561], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:49.085539Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: 
TxInitSchema.Complete 2025-10-24T16:27:49.085554Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:49.085644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:49.085650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:49.085654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:49.085691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:49.085705Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:49.085713Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:712:2569] in generation 1 2025-10-24T16:27:49.085749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:49.085759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:49.085829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:27:49.085834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:27:49.085838Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:27:49.085854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:49.085862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:49.085866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:713:2571] in generation 1 2025-10-24T16:27:49.096085Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:49.102017Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:49.102060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:49.102075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:716:2590] 2025-10-24T16:27:49.102078Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:49.102082Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:49.102085Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:49.102103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:49.102107Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:27:49.102115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:49.102120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:717:2591] 
2025-10-24T16:27:49.102122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:27:49.102124Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:27:49.102127Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:27:49.102158Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:680:2569], Recipient [1:680:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:49.102162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:49.102168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Send ... (s): CA [2:971:2749], 2025-10-24T16:27:51.428150Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2285: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-10-24T16:27:51.428367Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:971:2749], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-10-24T16:27:51.428373Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:971:2749], 2025-10-24T16:27:51.428377Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:971:2749], 2025-10-24T16:27:51.428512Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:971:2749], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 189 Tasks { TaskId: 1 CpuTimeUs: 95 FinishTimeMs: 1761323271428 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 88 HostName: "ghrun-aakgzge3ui" NodeId: 2 CreateTimeMs: 1761323271428 UpdateTimeMs: 1761323271428 } MaxMemoryUsage: 1048576 } 2025-10-24T16:27:51.428524Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. 
Compute actor has finished execution: [2:971:2749] 2025-10-24T16:27:51.428533Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:283: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. Send Commit to BufferActor=[2:967:2749] 2025-10-24T16:27:51.428540Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000189s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-10-24T16:27:51.428710Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [2:977:2765], Recipient [2:927:2732]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:51.428719Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:51.428726Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:975:2764], serverId# [2:977:2765], sessionId# [0:0:0] 2025-10-24T16:27:51.428760Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 278003712, Sender [2:974:2749], Recipient [2:927:2732]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2025-10-24T16:27:51.428765Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-10-24T16:27:51.428802Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435074, Sender [2:927:2732], Recipient [2:927:2732]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-10-24T16:27:51.428806Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3184: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-10-24T16:27:51.428821Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-10-24T16:27:51.428835Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2025-10-24T16:27:51.428852Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:252: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-10-24T16:27:51.428856Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-10-24T16:27:51.428875Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CheckWrite 2025-10-24T16:27:51.428887Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:51.428890Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: 
Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckWrite 2025-10-24T16:27:51.428894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:51.428897Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:51.428905Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976710661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-10-24T16:27:51.428915Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2025-10-24T16:27:51.428919Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:51.428921Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:51.428924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit BlockFailPoint 2025-10-24T16:27:51.428927Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit BlockFailPoint 2025-10-24T16:27:51.428930Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:51.428932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit BlockFailPoint 2025-10-24T16:27:51.428935Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit ExecuteWrite 2025-10-24T16:27:51.428938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit ExecuteWrite 2025-10-24T16:27:51.428942Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-10-24T16:27:51.428948Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976710661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-10-24T16:27:51.428978Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-10-24T16:27:51.428986Z node 2 :TX_DATASHARD TRACE: execute_write_unit.cpp:47: add locks to result: 0 2025-10-24T16:27:51.428996Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:51.428999Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-10-24T16:27:51.429003Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-10-24T16:27:51.429006Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:27:51.429021Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 
72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:51.429023Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-10-24T16:27:51.429026Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:51.429029Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:51.429037Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:51.429039Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:51.429042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037888 has finished 2025-10-24T16:27:51.439339Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-10-24T16:27:51.439366Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:27:51.439377Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-10-24T16:27:51.439407Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:51.439572Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1209: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-10-24T16:27:51.439580Z node 2 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1223: ActorId: [2:968:2749] TxId: 281474976710662. Ctx: { TraceId: 01k8bgrh7wbrzysxh5vw5g37ph, Database: , SessionId: ydb://session/3?node_id=2&id=MWY0NjhhOGUtZGE3MzAwNzktNzBkM2NkMTQtNzkyMTNiOWY=, PoolId: default, DatabaseId: /Root}. 
Terminate, become ZombieState |60.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] >> KqpRm::NotEnoughExecutionUnits |60.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |60.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] >> KqpRm::SingleTask >> KqpRm::Reduce [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::ManyTasks [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2025-10-24T16:27:21.529349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.558214Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.558298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.558334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a98/r3tmp/tmp3LEAcE/pdisk_1.dat 2025-10-24T16:27:21.654346Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.687136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.687176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.721848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10805, node 1 2025-10-24T16:27:21.802463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.802482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.802485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.802598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.804977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.839538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3615 2025-10-24T16:27:22.163557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.091983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.094650Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.095433Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.107351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.107389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.130965Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.131947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.230268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.230311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.243645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.298049Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.320154Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320417Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320613Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320750Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320791Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320808Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320853Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320885Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.320904Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.413128Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.421109Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.421148Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.440654Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.440746Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.440788Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.440796Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.440805Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.440814Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.440821Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.440833Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.441006Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.444445Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.444477Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.446801Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:23.446859Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:23.493190Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:23.494443Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.499834Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.499877Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.499895Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.503165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.505764Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.505811Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.576594Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.651283Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.715346Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.820717Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.699150Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.762322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.762366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.762479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2249:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.762489Z node 1 : ... xecute. Node count = 1 2025-10-24T16:27:49.816221Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4135:3809] 2025-10-24T16:27:49.816267Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4135:3809], schemeshard id = 72075186224037897 2025-10-24T16:27:49.816296Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4085:3782], server id = [2:4136:3810], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:49.816312Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4136:3810] 2025-10-24T16:27:49.816328Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:4136:3810], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-10-24T16:27:49.868819Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:49.868884Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:49.869017Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4141:3815], server id = [2:4142:3816], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:49.869038Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4141:3815], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:49.869291Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:49.869298Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:49.869319Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:49.869344Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:49.869386Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4144:3818], ActorId: [2:4145:3819], Starting query actor #1 [2:4146:3820] 2025-10-24T16:27:49.869395Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4145:3819], ActorId: [2:4146:3820], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:49.869825Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4141:3815], server id = [2:4142:3816], tablet id = 72075186224037899 2025-10-24T16:27:49.869831Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:49.869919Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4145:3819], ActorId: [2:4146:3820], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDdkNzdjYmEtMmYzYzZlYWUtODc4M2VhYWUtMTZjMGExOGI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:49.874727Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4155:3829]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:49.874800Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:49.874805Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4155:3829], StatRequests.size() = 1 2025-10-24T16:27:49.908686Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4145:3819], ActorId: [2:4146:3820], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDdkNzdjYmEtMmYzYzZlYWUtODc4M2VhYWUtMTZjMGExOGI=, TxId: 2025-10-24T16:27:49.908707Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4145:3819], ActorId: [2:4146:3820], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDdkNzdjYmEtMmYzYzZlYWUtODc4M2VhYWUtMTZjMGExOGI=, TxId: 2025-10-24T16:27:49.908765Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4144:3818], ActorId: [2:4145:3819], Got response [2:4146:3820] SUCCESS 2025-10-24T16:27:49.908834Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:49.919939Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:49.919956Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:49.972042Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4173:3837] 2025-10-24T16:27:49.972235Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3111:3335] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-10-24T16:27:49.972244Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:38: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:3111:3335] 2025-10-24T16:27:49.972256Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:100: [72075186224037894] TTxAnalyze::Complete 2025-10-24T16:27:50.221010Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:50.221036Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:50.241758Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 3 is different from the current 0 2025-10-24T16:27:50.241782Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:50.705856Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:50.705883Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:50.705888Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:51.612082Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:51.612127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:51.612133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:51.612305Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:51.623412Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:51.623524Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:51.623534Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:51.623653Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:51.635190Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:51.635264Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-10-24T16:27:51.635435Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4246:3870], server id = [2:4247:3871], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:51.635466Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4246:3870], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:51.635801Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:51.635819Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:51.635871Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:51.635899Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:51.635990Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4249:3873], ActorId: [2:4250:3874], Starting query actor #1 [2:4251:3875] 2025-10-24T16:27:51.636004Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4250:3874], ActorId: [2:4251:3875], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:51.636637Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4246:3870], server id = [2:4247:3871], tablet id = 72075186224037899 2025-10-24T16:27:51.636650Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:51.636815Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4250:3874], ActorId: [2:4251:3875], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OWI5ZGFiMWQtZWUwNTljYy04ZDY0OGMzYi04NGIzYWU3Yw==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:51.650395Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4250:3874], ActorId: [2:4251:3875], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWI5ZGFiMWQtZWUwNTljYy04ZDY0OGMzYi04NGIzYWU3Yw==, TxId: 2025-10-24T16:27:51.650417Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4250:3874], ActorId: [2:4251:3875], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWI5ZGFiMWQtZWUwNTljYy04ZDY0OGMzYi04NGIzYWU3Yw==, TxId: 2025-10-24T16:27:51.650472Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4249:3873], ActorId: [2:4250:3874], Got response [2:4251:3875] SUCCESS 2025-10-24T16:27:51.650545Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:51.672065Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for 
path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:51.672084Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3111:3335] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: 2025-10-24T16:27:48.809129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:48.833461Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:48.833518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:48.833524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00188d/r3tmp/tmpnO4Nx7/pdisk_1.dat 2025-10-24T16:27:48.895964Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:48.896577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:48.896599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:48.896672Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323268527079 != 1761323268527082 2025-10-24T16:27:48.927454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:48.974524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:49.006901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:49.089596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:49.102620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:49.102762Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:49.102800Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:49.102839Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:49.103469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:49.108736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:49.108758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:49.108851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:49.108857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:49.108861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:49.108899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:49.108912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:49.108920Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:49.119083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:49.122034Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:49.122074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:49.122089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:49.122092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:49.122096Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:49.122099Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:49.122132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:49.122136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:49.122198Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:49.122209Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:49.122222Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:49.122228Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:49.122232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:49.122236Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:49.122238Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:49.122242Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:49.122245Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:49.122253Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:49.122256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:49.122260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:49.122317Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:49.122320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:49.122332Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:49.122368Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:49.122375Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:49.122385Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:49.122390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:49.122393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:49.122396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:49.122399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:49.122460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:49.122463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:49.122465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:49.122468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:49.122473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:49.122476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:49.122480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:49.122482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:49.122486Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:49.122662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:27:49.122668Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:27:49.132867Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:49.132885Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:49.132891Z n ... 6:27:51.633762Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877763, Sender [2:1159:2903], Recipient [2:675:2566]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037891 ClientId: [2:1159:2903] ServerId: [2:1163:2907] } 2025-10-24T16:27:51.633767Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-10-24T16:27:51.633776Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877763, Sender [2:1157:2901], Recipient [2:759:2626]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1157:2901] ServerId: [2:1161:2905] } 2025-10-24T16:27:51.633780Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-10-24T16:27:51.633808Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:1027:2811], Recipient [2:1027:2811]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:51.633812Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:51.633878Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1021:2807]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 2000} 2025-10-24T16:27:51.633883Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-10-24T16:27:51.633888Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-10-24T16:27:51.633893Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-10-24T16:27:51.633908Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2025-10-24T16:27:51.633914Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:51.633922Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037891 2025-10-24T16:27:51.633927Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037891 has no attached operations 2025-10-24T16:27:51.633932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit 
PlanQueue has no ready operations at 72075186224037891 2025-10-24T16:27:51.633938Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-10-24T16:27:51.633961Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-10-24T16:27:51.633973Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877764, Sender [2:1161:2905], Recipient [2:1027:2811]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:51.633977Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3194: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:51.633982Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1157:2901], serverId# [2:1161:2905], sessionId# [0:0:0] 2025-10-24T16:27:51.634003Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877764, Sender [2:1163:2907], Recipient [2:1021:2807]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:51.634007Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3194: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:27:51.634010Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:1159:2903], serverId# [2:1163:2907], sessionId# [0:0:0] 2025-10-24T16:27:51.634016Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2025-10-24T16:27:51.634020Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:51.634023Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037893 2025-10-24T16:27:51.634027Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-10-24T16:27:51.634030Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037893 2025-10-24T16:27:51.634033Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2025-10-24T16:27:51.634039Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2025-10-24T16:27:51.634112Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1027:2811]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-10-24T16:27:51.634117Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-10-24T16:27:51.634122Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-10-24T16:27:51.634126Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-10-24T16:27:51.634252Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1021:2807]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-10-24T16:27:51.634257Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3196: StateWork, processing event 
TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-10-24T16:27:51.634261Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 2000 2025-10-24T16:27:51.634266Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-10-24T16:27:51.634272Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:27:51.634288Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1027:2811]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-10-24T16:27:51.634291Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3196: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-10-24T16:27:51.634293Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-10-24T16:27:51.634297Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-10-24T16:27:51.634299Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:27:51.644651Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037888 ack split to schemeshard 281474976710664 2025-10-24T16:27:51.644989Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976710665 2025-10-24T16:27:51.645690Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553158, Sender [2:398:2397], Recipient [2:677:2567] 2025-10-24T16:27:51.645713Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976710664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2025-10-24T16:27:51.646360Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553158, Sender [2:398:2397], Recipient [2:766:2630] 2025-10-24T16:27:51.646378Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976710665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-10-24T16:27:51.646881Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037888 ack split partitioning changed to schemeshard 281474976710664 2025-10-24T16:27:51.646897Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-10-24T16:27:51.647048Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 268828683, Sender [2:666:2560], Recipient [2:675:2566]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-10-24T16:27:51.647166Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976710665 2025-10-24T16:27:51.647175Z node 2 :TX_DATASHARD DEBUG: 
datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-10-24T16:27:51.647191Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 268828683, Sender [2:752:2621], Recipient [2:759:2626]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-10-24T16:27:51.984708Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [2:973:2668], Recipient [2:675:2566]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 973 RawX2: 8589937260 } TxBody: " \0008\000`\200\200\200\005j\213\007\010\001\022\314\006\010\001\022\024\n\022\t\315\003\000\000\000\000\000\000\021l\n\000\000\002\000\000\000\032\262\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-10-24T16:27:51.984730Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:51.984752Z node 2 :TX_DATASHARD NOTICE: datashard.cpp:3109: Rejecting data TxId 281474976710663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-10-24T16:27:51.984845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2025-10-24T16:27:51.984921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 >> KqpRm::NotEnoughMemory ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-10-24T16:27:51.832262Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:51.832432Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:51.832482Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:51.832592Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:51.840789Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:51.842253Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:51.842296Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:51.842560Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-10-24T16:27:51.842624Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:51.842664Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.842670Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.842701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:51.844824Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:51.844862Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:51.844872Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:51.844882Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.844890Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.844897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.866314Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.867341Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:51.867411Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.867770Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:51.898972Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.899021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:51.909725Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:51.909775Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:51.909787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:51.909798Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:51.909817Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:51.909824Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:51.909831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:51.909837Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:51.920485Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:51.920525Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:51.931179Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:51.931239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:51.931407Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:51.931412Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:51.932793Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:51.932803Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:51.933109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:51.933125Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:51.933352Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/00298f/r3tmp/tmp2EnD5V/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:51.933411Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/00298f/r3tmp/tmp2EnD5V/pdisk_1.dat 2025-10-24T16:27:51.933416Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/00298f/r3tmp/tmp2EnD5V/pdisk_1.dat 2025-10-24T16:27:51.933549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:51.933556Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:51.933607Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 
Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:51.933621Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:51.933631Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:51.933658Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:51.933681Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:51.933942Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:51.933972Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:51.944629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:51.944661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:51.946471Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:51.946598Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/00298f/r3tmp/tmp2EnD5V/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:51.946694Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/00298f/r3tmp/tmp2EnD5V/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/00298f/r3tmp/tmp2EnD5V/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18158990140845949320 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:51.946822Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:51.946860Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:51.946864Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:51.946890Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:51.946900Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:51.948021Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:51.948049Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:51.948060Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:51.948066Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:51.948074Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:51.948146Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:51.948152Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:51.948155Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:51.948176Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:51.948440Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:51.948445Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-10-24T16:27:51.948448Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:51.948453Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:51.948686Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:51.948689Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:51.948704Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:51.948707Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:51.948841Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:51.948848Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:51.948853Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:51.948867Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:51.948967Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:51.948999Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:51.949003Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:51.949005Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:51.961956Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:51.975185Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:51.975201Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:51.975214Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:51.975218Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-10-24T16:27:46.682571Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:46.694458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:46.694474Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:46.694832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:46.694870Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:46.694914Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:46.695592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:46.703276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:46.703309Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:46.703413Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:46.703420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:46.703425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:46.703482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:46.703489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:46.703498Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:46.739473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:46.742732Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:46.742797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:46.742819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:46.742824Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:46.742830Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:46.742835Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:46.742887Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:46.742893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-10-24T16:27:46.742964Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:46.742987Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:46.742995Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:46.743001Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:46.743007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:46.743012Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:46.743017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:46.743025Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:46.743030Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:46.743039Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:46.743043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:46.743049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:46.743480Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:46.743490Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:46.743500Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:46.743528Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:46.743536Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:46.743543Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:46.743548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:46.743550Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:46.743554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:46.743566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit 
StoreSchemeTx 2025-10-24T16:27:46.743619Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:46.743621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:46.743624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:46.743626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:46.743633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:46.743635Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:46.743637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:46.743640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:46.743643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:46.754177Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:46.754194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:46.754208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:46.754217Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:46.754228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:46.754290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:46.754295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:46.754300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:46.754312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:46.754315Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:46.754335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:46.754339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:46.754342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:46.754345Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:46.756742Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:46.756761Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:46.756804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:46.756809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:46.756816Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:46.756821Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:46.756825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:46.756831Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:46.756835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... 0: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:27:51.513519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-10-24T16:27:51.513522Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:51.513544Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:51.513547Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-10-24T16:27:51.513551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:27:51.513554Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:51.513569Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:51.513572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-10-24T16:27:51.513576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:27:51.513580Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-10-24T16:27:51.513583Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 9437184 2025-10-24T16:27:51.513608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-10-24T16:27:51.513611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:51.513614Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-10-24T16:27:51.513638Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-10-24T16:27:51.513641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:51.513643Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-10-24T16:27:51.513650Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [1:240:2232], Recipient [1:460:2402]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-10-24T16:27:51.513653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:27:51.513656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-10-24T16:27:51.513665Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-10-24T16:27:51.513672Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-10-24T16:27:51.513682Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-10-24T16:27:51.513692Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-10-24T16:27:51.513696Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:51.513698Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-10-24T16:27:51.513706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:51.513710Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:51.513714Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-10-24T16:27:51.513718Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 
immediate 0 planned 1 2025-10-24T16:27:51.513723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-10-24T16:27:51.513726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-10-24T16:27:51.513732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is Executed 2025-10-24T16:27:51.513735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-10-24T16:27:51.513738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2025-10-24T16:27:51.513741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2025-10-24T16:27:51.513743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is Executed 2025-10-24T16:27:51.513746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2025-10-24T16:27:51.513748Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-10-24T16:27:51.513750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-10-24T16:27:51.513862Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:305: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-10-24T16:27:51.513869Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-10-24T16:27:51.513876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-10-24T16:27:51.513878Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-10-24T16:27:51.513881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-10-24T16:27:51.513883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-10-24T16:27:51.513936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-10-24T16:27:51.513940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-10-24T16:27:51.513943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-10-24T16:27:51.513945Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-10-24T16:27:51.513949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is Executed 2025-10-24T16:27:51.513951Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-10-24T16:27:51.513954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000005:152] at 9437186 has finished 2025-10-24T16:27:51.513956Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:51.513959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-10-24T16:27:51.513961Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-10-24T16:27:51.513964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-10-24T16:27:51.513997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-10-24T16:27:51.514000Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:51.514002Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-10-24T16:27:51.524798Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-10-24T16:27:51.524812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-10-24T16:27:51.524825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:105:2138], exec latency: 2 ms, propose latency: 4 ms 2025-10-24T16:27:51.524833Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-10-24T16:27:51.524838Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-10-24T16:27:51.524890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-10-24T16:27:51.524894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:51.524897Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> KqpRm::NotEnoughExecutionUnits [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-10-24T16:27:51.941708Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:51.941886Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:51.941941Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:51.942046Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 
2025-10-24T16:27:51.949569Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:51.951002Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:51.951038Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:51.951272Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:51.951337Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:51.951372Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.951376Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.951404Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:51.953334Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:51.953363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:51.953372Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:51.953383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.953392Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.953399Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.974639Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.975208Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:51.975269Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.975600Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.006543Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.006573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.017140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.017171Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.017181Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.017189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.017207Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.017214Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.017219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.017225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.027787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.027819Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.038481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.038529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.038707Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.038712Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.040138Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.040149Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.040528Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.040545Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.040777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/0029d2/r3tmp/tmpqhCofO/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.040845Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/0029d2/r3tmp/tmpqhCofO/pdisk_1.dat 2025-10-24T16:27:52.040850Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
2:1000 Path# /home/runner/.ya/build/build_root/fxx3/0029d2/r3tmp/tmpqhCofO/pdisk_1.dat 2025-10-24T16:27:52.041001Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.041009Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.041062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.041077Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.041088Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.041115Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.041143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.041451Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.041488Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.052140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.052185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.054030Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.054162Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029d2/r3tmp/tmpqhCofO/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:52.054275Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029d2/r3tmp/tmpqhCofO/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/0029d2/r3tmp/tmpqhCofO/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17923354997945881685 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.054413Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.054450Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.054455Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.054478Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.054488Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.055605Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.055644Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.055656Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.055664Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.055671Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.055735Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.055741Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.055745Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.055765Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:52.056051Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.056059Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-10-24T16:27:52.056061Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.056067Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.056306Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.056310Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.056328Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.056331Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.056496Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.056503Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.056510Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.056512Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.056618Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.056651Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.056655Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.056657Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.069526Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.082814Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.082832Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.082846Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.082850Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. >> KqpRm::SingleTask [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2025-10-24T16:27:49.283673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:49.305515Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:49.305569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:49.305575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00188b/r3tmp/tmpph9IVA/pdisk_1.dat 2025-10-24T16:27:49.365085Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:49.365593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:49.365616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:49.365690Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323269000607 != 1761323269000610 2025-10-24T16:27:49.396371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:49.441522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:49.484080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:49.556550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:49.739700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:49.824424Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:49.939819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:49.939841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:918:2719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:49.939878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:49.940019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:49.940033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:49.940561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:50.074763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:932:2727], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:27:50.110548Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:990:2766] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:50.160712Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgrfsk1e24q1ten1fmwyv8, Database: , SessionId: ydb://session/3?node_id=1&id=OGZjYzdmZDUtOGRlNjczMGItYWM4ZDJlMDAtMTI3NzdiNTY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:50.181453Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k8bgrg0y0keahj8eh81j7pna, Database: , SessionId: ydb://session/3?node_id=1&id=ODEzOWVhNjQtM2VjZmU4NjEtOWYxZjVjZGQtODRmOTcwYjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:50.242215Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k8bgrg1j5yf5622c6h356gnk, Database: , SessionId: ydb://session/3?node_id=1&id=NDEwZDVkYTUtM2JiMjY2YTItZGI2NGE3OGMtMzgxNGNjNmY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-10-24T16:27:50.255480Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715664. Ctx: { TraceId: 01k8bgrg3539fncssnv3mq0e8z, Database: , SessionId: ydb://session/3?node_id=1&id=NDEwZDVkYTUtM2JiMjY2YTItZGI2NGE3OGMtMzgxNGNjNmY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-10-24T16:27:50.275526Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgrg3ve6mmw4q3neg93mb8, Database: , SessionId: ydb://session/3?node_id=1&id=N2Y4ZWNhNGUtZjgxMTlmOWItYTFhNDk2ZTQtYzRiYzIxZjY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root ... immediate upsert is blocked 2025-10-24T16:27:50.276600Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2025-10-24T16:27:50.277744Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:843: SelfId: [1:1187:2819], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1067:2819]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1187:2819]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2025-10-24T16:27:50.277818Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [1:1180:2819], SessionActorId: [1:1067:2819], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1067:2819]. 2025-10-24T16:27:50.278376Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=1&id=N2Y4ZWNhNGUtZjgxMTlmOWItYTFhNDk2ZTQtYzRiYzIxZjY=, ActorId: [1:1067:2819], ActorState: ExecuteState, TraceId: 01k8bgrg3ve6mmw4q3neg93mb8, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1181:2819] from: [1:1180:2819] 2025-10-24T16:27:50.278430Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [1:1181:2819] TxId: 281474976715665. Ctx: { TraceId: 01k8bgrg3ve6mmw4q3neg93mb8, Database: , SessionId: ydb://session/3?node_id=1&id=N2Y4ZWNhNGUtZjgxMTlmOWItYTFhNDk2ZTQtYzRiYzIxZjY=, PoolId: default, DatabaseId: /Root}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } 2025-10-24T16:27:50.278715Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=N2Y4ZWNhNGUtZjgxMTlmOWItYTFhNDk2ZTQtYzRiYzIxZjY=, ActorId: [1:1067:2819], ActorState: ExecuteState, TraceId: 01k8bgrg3ve6mmw4q3neg93mb8, Create QueryResponse for error on request, msg: 2025-10-24T16:27:50.279267Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1236: SelfId: [1:1153:2821], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:1069:2821]TEvDeliveryProblem was received from tablet: 72075186224037889 2025-10-24T16:27:50.279281Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [1:1143:2821], SessionActorId: [1:1069:2821], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1069:2821]. 2025-10-24T16:27:50.279345Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=1&id=NDEwZDVkYTUtM2JiMjY2YTItZGI2NGE3OGMtMzgxNGNjNmY=, ActorId: [1:1069:2821], ActorState: ExecuteState, TraceId: 01k8bgrg3539fncssnv3mq0e8z, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1144:2821] from: [1:1143:2821] 2025-10-24T16:27:50.279374Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [1:1144:2821] TxId: 281474976715664. Ctx: { TraceId: 01k8bgrg3539fncssnv3mq0e8z, Database: , SessionId: ydb://session/3?node_id=1&id=NDEwZDVkYTUtM2JiMjY2YTItZGI2NGE3OGMtMzgxNGNjNmY=, PoolId: default, DatabaseId: /Root}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } 2025-10-24T16:27:50.279412Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=NDEwZDVkYTUtM2JiMjY2YTItZGI2NGE3OGMtMzgxNGNjNmY=, ActorId: [1:1069:2821], ActorState: ExecuteState, TraceId: 01k8bgrg3539fncssnv3mq0e8z, Create QueryResponse for error on request, msg: 2025-10-24T16:27:50.415028Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k8bgrg7w8ssjh4f6ys4kccm0, Database: , SessionId: ydb://session/3?node_id=1&id=M2E5YTBhNWMtNjM1Njk4NjUtZjg0M2FlZDMtNWFjYWViYzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-10-24T16:27:50.927839Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:50.929301Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:50.929697Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:50.929740Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:50.929757Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00188b/r3tmp/tmpBTo7GL/pdisk_1.dat 2025-10-24T16:27:50.977972Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:50.978002Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:50.980920Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:50.981238Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1761323270668017 != 1761323270668021 2025-10-24T16:27:51.012032Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:51.054055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:51.085557Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:51.168476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:51.344375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:51.429349Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:51.544564Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2671], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:51.544585Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:840:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:51.544593Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:51.544714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:845:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:51.544722Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:51.545295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:51.680746Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2679], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:27:51.712640Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:902:2718] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:51.720444Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710661. Ctx: { TraceId: 01k8bgrhbrc3q9ze8kxcda05bv, Database: , SessionId: ydb://session/3?node_id=2&id=OTE2NTc1OWMtOTI1MDQyOWMtNTE3YWNhNzUtZTcxMmU1N2Q=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:51.740157Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710662. Ctx: { TraceId: 01k8bgrhhmf33q245g5sdd0yh0, Database: , SessionId: ydb://session/3?node_id=2&id=MjEwZjFmYmQtMzU3NTYzOTYtZmI0MmE5YTItOGI1MzdiYWM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root ... waiting for readsets 2025-10-24T16:27:52.076268Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1236: SelfId: [2:989:2757], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [2:964:2757]TEvDeliveryProblem was received from tablet: 72075186224037888 2025-10-24T16:27:52.076301Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [2:979:2757], SessionActorId: [2:964:2757], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:964:2757]. 2025-10-24T16:27:52.076352Z node 2 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1096: TxId: 281474976710664, task: 1, CA Id [2:1015:2796]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-10-24T16:27:52.076523Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=2&id=NWMxNDExZS04Yzc4ZjUzYi05ZDI2ODMyYi0xNmZlOTQ2Mw==, ActorId: [2:964:2757], ActorState: ExecuteState, TraceId: 01k8bgrhj87rz0j2byts1dd0tv, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:980:2757] from: [2:979:2757] 2025-10-24T16:27:52.076554Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [2:980:2757] TxId: 281474976710663. Ctx: { TraceId: 01k8bgrhj87rz0j2byts1dd0tv, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NWMxNDExZS04Yzc4ZjUzYi05ZDI2ODMyYi0xNmZlOTQ2Mw==, PoolId: default}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } 2025-10-24T16:27:52.076625Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=2&id=NWMxNDExZS04Yzc4ZjUzYi05ZDI2ODMyYi0xNmZlOTQ2Mw==, ActorId: [2:964:2757], ActorState: ExecuteState, TraceId: 01k8bgrhj87rz0j2byts1dd0tv, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [GOOD] >> DataShardReadIteratorConsistency::BrokenWriteLockBeforeIteration >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpRm::NotEnoughMemory [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-10-24T16:27:52.157778Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:52.157939Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:52.157992Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:52.158101Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:52.165573Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:52.166909Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:52.166944Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:52.167170Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:52.167234Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:52.167293Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.167299Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.167327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:52.169191Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:52.169219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:52.169228Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:52.169238Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.169246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.169253Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.190431Z node 1 :TX_PROXY DEBUG: 
proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.191140Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:52.191200Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.191525Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.222543Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.222584Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.233415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.233464Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.233484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.233499Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.233529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.233539Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.233547Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.233571Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.244312Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.244351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.255129Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.255189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.255459Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.255466Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.257653Z 
node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.257670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.258169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.258192Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.258530Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/002981/r3tmp/tmpvwa3L0/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.258616Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/002981/r3tmp/tmpvwa3L0/pdisk_1.dat 2025-10-24T16:27:52.258622Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/002981/r3tmp/tmpvwa3L0/pdisk_1.dat 2025-10-24T16:27:52.258823Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.258835Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.258909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.258933Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.258949Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.258993Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.259030Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.259434Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.259487Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 
2025-10-24T16:27:52.270276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.270317Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.272636Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.272800Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/002981/r3tmp/tmpvwa3L0/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:52.272961Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/002981/r3tmp/tmpvwa3L0/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/002981/r3tmp/tmpvwa3L0/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3869485931976399874 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.273153Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.273210Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.273217Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.273275Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.273286Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.274704Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.274744Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.274756Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.274764Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: 
TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.274772Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.274838Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.274845Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.274848Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.274870Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:52.275148Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.275153Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.275155Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.275160Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.275425Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.275432Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.275451Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.275454Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.275650Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.275658Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.275665Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.275668Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.275778Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.275811Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.275816Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.275818Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.289137Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.303011Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.303032Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.303047Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.303051Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
>> DataShardTxOrder::RandomPointsAndRanges >> KqpRm::SnapshotSharingByExchanger ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-10-24T16:27:52.278793Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:52.278953Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:52.279005Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:52.279107Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:52.286155Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:52.287512Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:52.287549Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:52.287778Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:52.287838Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:52.287895Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.287899Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.287923Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:52.290073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:52.290101Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:52.290108Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:52.290117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.290125Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.290131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.311522Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.312275Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:52.312335Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.312715Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.344057Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.344103Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.354868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.354910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.354925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.354936Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.354960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.354967Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.354973Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.354980Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.365821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.365868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.376657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.376712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.376929Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.376937Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.379185Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.379202Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.379617Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.379657Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.379990Z node 
1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/002970/r3tmp/tmpymFiVy/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.380068Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/002970/r3tmp/tmpymFiVy/pdisk_1.dat 2025-10-24T16:27:52.380075Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/002970/r3tmp/tmpymFiVy/pdisk_1.dat 2025-10-24T16:27:52.380253Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.380265Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.380338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.380358Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.380370Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.380407Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.380438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.380803Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.380864Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.391677Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.391719Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.394132Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.394299Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/002970/r3tmp/tmpymFiVy/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-10-24T16:27:52.394443Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/002970/r3tmp/tmpymFiVy/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/002970/r3tmp/tmpymFiVy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14406079435828891460 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.394585Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.394634Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.394640Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.394673Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.394682Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.396384Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.396425Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.396439Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.396452Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.396464Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.396544Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.396552Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.396557Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.396582Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 
2025-10-24T16:27:52.396968Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.396981Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.396985Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.396994Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.397397Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.397408Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.397434Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.397438Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.397645Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.397656Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.397663Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.397667Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.397818Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.397864Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.397871Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.397874Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.411497Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.424959Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.424975Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.424988Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.424991Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-10-24T16:27:19.700017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.729909Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.730001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.730038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002b1b/r3tmp/tmpnAHCuT/pdisk_1.dat 2025-10-24T16:27:19.829904Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:19.865555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:19.865600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:19.898719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1702, node 1 2025-10-24T16:27:19.987808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:19.987868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:19.987874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:19.988088Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:19.992370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.018556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23387 2025-10-24T16:27:20.407962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.300773Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.303656Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.304614Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.312947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.312985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.336342Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.336942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.419295Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.419333Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.431161Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.494422Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.515884Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516063Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516194Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516300Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516319Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516340Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516380Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516397Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.516413Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.582154Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.587575Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.587602Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.593437Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.593776Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.593807Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.593812Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.593819Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.593825Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.593831Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.593837Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.594176Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.594441Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:21.594775Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.597015Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.597031Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.597042Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.601926Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:21.602804Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:21.604235Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.604268Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.605969Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:21.606047Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:21.668164Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.689592Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.825310Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.826889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.827901Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.827928Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:21.905370Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:22.123240Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:22.607065Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.651501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3062], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.651550Z n ... e 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:52.081702Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:52.081762Z node 2 :STATISTICS DEBUG: tx_init.cpp:55: [72075186224037894] Loaded database: /Root/Database 2025-10-24T16:27:52.081771Z node 2 :STATISTICS DEBUG: tx_init.cpp:59: [72075186224037894] Loaded traversal start key 2025-10-24T16:27:52.081776Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-10-24T16:27:52.081779Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 4 2025-10-24T16:27:52.081783Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1761323272029212 2025-10-24T16:27:52.081786Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-10-24T16:27:52.081789Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2025-10-24T16:27:52.081792Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-10-24T16:27:52.081798Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-10-24T16:27:52.081802Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:52.081812Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-10-24T16:27:52.081816Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:52.081822Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:52.081829Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:52.081860Z node 2 :STATISTICS DEBUG: tx_init.cpp:303: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:52.082073Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:52.082300Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:52.082313Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:52.082504Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:52.082547Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:52.082554Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:52.082769Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:52.094728Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5108:4584] 2025-10-24T16:27:52.094770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5064:4556], server id = [2:5108:4584], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:52.094809Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:5108:4584], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-10-24T16:27:52.094855Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5109:4585] 2025-10-24T16:27:52.094870Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5109:4585], schemeshard id = 72075186224037897 2025-10-24T16:27:52.126989Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:52.127057Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:52.127334Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5114:4589], server id = [2:5118:4593], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:52.127736Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5114:4589], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.127840Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5115:4590], server id = [2:5119:4594], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:52.127869Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5115:4590], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.128126Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5116:4591], server id = [2:5120:4595], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:52.128139Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5116:4591], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.128311Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5117:4592], server id = [2:5121:4596], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:52.128322Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5117:4592], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.129694Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:52.129933Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5114:4589], server id = [2:5118:4593], tablet id = 72075186224037899 2025-10-24T16:27:52.129944Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.130158Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:52.130233Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5115:4590], server id = [2:5119:4594], tablet id = 72075186224037900 2025-10-24T16:27:52.130238Z 
node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.130291Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:52.130323Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:52.130328Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:52.130368Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:52.130394Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:52.130475Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5134:4605], ActorId: [2:5135:4606], Starting query actor #1 [2:5136:4607] 2025-10-24T16:27:52.130485Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5135:4606], ActorId: [2:5136:4607], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:52.131106Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5117:4592], server id = [2:5121:4596], tablet id = 72075186224037902 2025-10-24T16:27:52.131116Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.131186Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5116:4591], server id = [2:5120:4595], tablet id = 72075186224037901 2025-10-24T16:27:52.131192Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.131318Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5135:4606], ActorId: [2:5136:4607], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZTMyM2NlYjMtMmU0YTI5NmItZWE5ZTVlZDQtZTc0OWVkNmQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:52.140514Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5145:4616]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.140576Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:52.140583Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5145:4616], StatRequests.size() = 1 2025-10-24T16:27:52.197003Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5135:4606], ActorId: [2:5136:4607], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTMyM2NlYjMtMmU0YTI5NmItZWE5ZTVlZDQtZTc0OWVkNmQ=, TxId: 2025-10-24T16:27:52.197026Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5135:4606], ActorId: [2:5136:4607], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTMyM2NlYjMtMmU0YTI5NmItZWE5ZTVlZDQtZTc0OWVkNmQ=, TxId: 2025-10-24T16:27:52.197088Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5134:4605], ActorId: [2:5135:4606], Got response [2:5136:4607] 
SUCCESS 2025-10-24T16:27:52.197167Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:52.208398Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:52.208414Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:52.271224Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5168:4624]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.271328Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:52.271334Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:52.271399Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:52.271407Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:52.271413Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:52.272589Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-10-24T16:27:52.431094Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:52.431259Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:52.431314Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:52.431415Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:52.438718Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:52.440119Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:52.440155Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:52.440379Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:52.440439Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:52.440471Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.440475Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.440496Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:52.442540Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:52.442572Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:52.442580Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:52.442590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.442598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.442604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.464027Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.464897Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:52.464986Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.465370Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.496521Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.496564Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.507233Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.507268Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.507278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.507286Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.507303Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.507309Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.507314Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.507320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.518035Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.518072Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.528718Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.528765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.528919Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.528924Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.530254Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.530263Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.530593Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.530611Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.530840Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/002946/r3tmp/tmpxbMIvy/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.530894Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/002946/r3tmp/tmpxbMIvy/pdisk_1.dat 2025-10-24T16:27:52.530898Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/002946/r3tmp/tmpxbMIvy/pdisk_1.dat 2025-10-24T16:27:52.531039Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.531046Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.531099Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.531114Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.531124Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# 
NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.531152Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.531176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.531452Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.531485Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.542415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.542484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.544714Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.544849Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/002946/r3tmp/tmpxbMIvy/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:52.544974Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/002946/r3tmp/tmpxbMIvy/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/002946/r3tmp/tmpxbMIvy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6679436304085627798 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.545116Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.545161Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.545165Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.545197Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.545205Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.546583Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.546610Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.546619Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.546627Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.546634Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.546710Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.546716Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.546719Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.546737Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:52.547032Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.547037Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-10-24T16:27:52.547039Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.547045Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.547311Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.547316Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.547331Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.547334Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.547506Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.547513Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.547519Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.547521Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.547620Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.547663Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.547668Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.547670Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.560622Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.573909Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.573926Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.573940Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.573946Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-10-24T16:27:52.466907Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:52.467094Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:52.467154Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:52.467270Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:52.475242Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:52.476770Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:52.476820Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:52.477053Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:52.477128Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:52.477167Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.477171Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.477194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:52.479085Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:52.479129Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:52.479138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:52.479147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.479155Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.479162Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.500491Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.501237Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:52.501341Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.501752Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.532801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.532839Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.543392Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.543423Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.543432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.543441Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.543466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.543472Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.543476Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.543482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.553976Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.554007Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.564629Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.564678Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.564872Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.564877Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.566488Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.566501Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.566847Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.566869Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.567119Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/00291e/r3tmp/tmpWS0scf/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.567175Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/00291e/r3tmp/tmpWS0scf/pdisk_1.dat 2025-10-24T16:27:52.567180Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/00291e/r3tmp/tmpWS0scf/pdisk_1.dat 2025-10-24T16:27:52.567323Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.567330Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.567393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.567413Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.567424Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.567453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.567477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.567779Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.567823Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.578416Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.578451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.580136Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.580251Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/00291e/r3tmp/tmpWS0scf/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-10-24T16:27:52.580355Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/00291e/r3tmp/tmpWS0scf/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/00291e/r3tmp/tmpWS0scf/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5669038414853340190 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.580474Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.580510Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.580514Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.580539Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.580545Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.581667Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.581692Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.581701Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.581708Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.581715Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.581773Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.581778Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.581781Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.581799Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 
2025-10-24T16:27:52.582044Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.582049Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.582051Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.582055Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.582270Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.582275Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.582289Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.582292Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.582430Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.582437Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.582443Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.582445Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.582539Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.582570Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.582575Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.582577Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.595611Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.608650Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.608668Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.608682Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.608686Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
>> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> Sharding::XXUsage [GOOD] |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> KqpRm::SingleSnapshotByExchanger [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-10-24T16:27:19.693083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.723448Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.723546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.723579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002b0d/r3tmp/tmpaumDD9/pdisk_1.dat 2025-10-24T16:27:19.822169Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:19.855196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:19.855241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:19.888995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23706, node 1 2025-10-24T16:27:19.972486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:19.972514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:19.972519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:19.972696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:19.976571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.011678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16250 2025-10-24T16:27:20.359366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.237354Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.240143Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.241113Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.249800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.249828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.272685Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.273153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.355529Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.355563Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.367323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.430779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.452434Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.452655Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.452857Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.452984Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.453004Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.453027Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.453079Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.453106Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.453129Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.522669Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.528393Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.528426Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.534630Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.534975Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.535007Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.535013Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.535020Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.535026Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.535033Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.535040Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.535422Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.535649Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:21.536069Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.538372Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.538387Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.538399Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.542349Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:21.543038Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:21.544291Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.544319Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.545906Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:21.545963Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:21.605629Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.627231Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.763530Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.765297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.766496Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.766523Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:21.846722Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:22.054634Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:22.549926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.601145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3062], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.601195Z ... e 2025-10-24T16:27:52.494240Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:52.494385Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:52.494421Z node 2 :STATISTICS DEBUG: tx_init.cpp:55: [72075186224037894] Loaded database: /Root/Database 2025-10-24T16:27:52.494428Z node 2 :STATISTICS DEBUG: tx_init.cpp:59: [72075186224037894] Loaded traversal start key 2025-10-24T16:27:52.494433Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-10-24T16:27:52.494436Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 4 2025-10-24T16:27:52.494439Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1761323272463424 2025-10-24T16:27:52.494442Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-10-24T16:27:52.494445Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-10-24T16:27:52.494452Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-10-24T16:27:52.494456Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:52.494466Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-10-24T16:27:52.494470Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:52.494474Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:52.494478Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:52.494514Z node 2 :STATISTICS DEBUG: tx_init.cpp:303: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:52.494757Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:52.494815Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:52.494844Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:52.494851Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR cookie 0 2025-10-24T16:27:52.507546Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5103:4576] 2025-10-24T16:27:52.507668Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5103:4576], schemeshard id = 72075186224037897 2025-10-24T16:27:52.507688Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5060:4549], server id = [2:5104:4577], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:52.507712Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5104:4577] 2025-10-24T16:27:52.507730Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:5104:4577], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-10-24T16:27:52.633050Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:52.633081Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:52.633419Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:52.644726Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:52.644773Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:52.644979Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5118:4584], server id = [2:5122:4588], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:52.645242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5118:4584], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.645315Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5119:4585], server id = [2:5123:4589], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:52.645321Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5119:4585], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.645490Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5120:4586], server id = [2:5125:4591], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:52.645496Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5120:4586], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.645587Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5121:4587], server id = [2:5124:4590], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:52.645592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5121:4587], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.646553Z node 2 
:STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:52.646641Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5118:4584], server id = [2:5122:4588], tablet id = 72075186224037899 2025-10-24T16:27:52.646646Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.646705Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:52.646768Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5119:4585], server id = [2:5123:4589], tablet id = 72075186224037900 2025-10-24T16:27:52.646770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.646822Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:52.646889Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5120:4586], server id = [2:5125:4591], tablet id = 72075186224037901 2025-10-24T16:27:52.646892Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.646916Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:52.646920Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:52.646962Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:52.646985Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:52.647031Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5138:4600], ActorId: [2:5139:4601], Starting query actor #1 [2:5140:4602] 2025-10-24T16:27:52.647039Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5139:4601], ActorId: [2:5140:4602], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:52.647481Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5121:4587], server id = [2:5124:4590], tablet id = 72075186224037902 2025-10-24T16:27:52.647487Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.647625Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5139:4601], ActorId: [2:5140:4602], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjJhODM0MC0yY2UxMWI4Mi0yNGM1OTY3Ny03ZDIzYTI0ZA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:52.656079Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5149:4611]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.656159Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:52.656173Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5149:4611], StatRequests.size() = 1 2025-10-24T16:27:52.716584Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5139:4601], ActorId: [2:5140:4602], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjJhODM0MC0yY2UxMWI4Mi0yNGM1OTY3Ny03ZDIzYTI0ZA==, TxId: 2025-10-24T16:27:52.716608Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5139:4601], ActorId: [2:5140:4602], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjJhODM0MC0yY2UxMWI4Mi0yNGM1OTY3Ny03ZDIzYTI0ZA==, TxId: 2025-10-24T16:27:52.716708Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5138:4600], ActorId: [2:5139:4601], Got response [2:5140:4602] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:52.716805Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5162:4617]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.716888Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:52.717088Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:52.717094Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:52.717140Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:52.717147Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:52.717152Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:52.718276Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-10-24T16:27:20.767616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:20.795013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:20.797752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:20.797856Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:20.797871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ac0/r3tmp/tmpBLUBBp/pdisk_1.dat 2025-10-24T16:27:20.896507Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:20.932916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:20.932962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:20.960882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19945, node 1 2025-10-24T16:27:21.057245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.057276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.057281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.057395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.061527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.094268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7724 2025-10-24T16:27:21.425628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.219444Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.221632Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.222752Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.231998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.232050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.266183Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.267432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.347982Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.348039Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.348425Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.348549Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.348718Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.348939Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.348967Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.349035Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.349062Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.349096Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.349126Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.361094Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.426064Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.438612Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.438653Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.444841Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.445097Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.445124Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.445130Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.445136Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.445143Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.445149Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:22.445157Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.445313Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.446317Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1613:2453] 2025-10-24T16:27:22.448256Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:22.449891Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.449909Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.449920Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.452905Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.452932Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1845:2593], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.455274Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1863:2606] 2025-10-24T16:27:22.455355Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1863:2606], schemeshard id = 72075186224037897 2025-10-24T16:27:22.461605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.463410Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.463450Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:22.519560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.592907Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:22.640962Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:22.728326Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:22.874483Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:23.567970Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.616839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2210:3050], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.616885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.616980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3055], DatabaseId: /Root, PoolId: default, Failed to fetch p ... database: /Root/Database 2025-10-24T16:27:52.566361Z node 2 :STATISTICS DEBUG: tx_init.cpp:59: [72075186224037894] Loaded traversal start key 2025-10-24T16:27:52.566365Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-10-24T16:27:52.566369Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 4 2025-10-24T16:27:52.566371Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1761323272492565 2025-10-24T16:27:52.566374Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-10-24T16:27:52.566377Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2025-10-24T16:27:52.566380Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-10-24T16:27:52.566386Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-10-24T16:27:52.566389Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:52.566397Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-10-24T16:27:52.566415Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:52.566419Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:52.566424Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:52.566452Z node 2 :STATISTICS DEBUG: tx_init.cpp:303: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:52.566713Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:52.566764Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:52.566832Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:52.566840Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:52.566985Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:52.566994Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:52.567204Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:52.579089Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5073:4575] 2025-10-24T16:27:52.579148Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5073:4575], schemeshard id = 72075186224037897 2025-10-24T16:27:52.579167Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5025:4548], server id = [2:5074:4576], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:52.579192Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5074:4576] 2025-10-24T16:27:52.579212Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:5074:4576], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-10-24T16:27:52.621460Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:52.621508Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-10-24T16:27:52.621584Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5011:4534], server id = [2:5015:4538], tablet id = 72075186224037900 2025-10-24T16:27:52.621588Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.621676Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5079:4581], server id = [2:5083:4585], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:52.621695Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5079:4581], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.621738Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5080:4582], server id = [2:5084:4586], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:52.621743Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5080:4582], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.621876Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5081:4583], server id = [2:5085:4587], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:52.621882Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5081:4583], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.621947Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5082:4584], server id = [2:5086:4588], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:52.621951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5082:4584], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:52.622158Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:52.622241Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5079:4581], server id = [2:5083:4585], tablet id = 72075186224037899 2025-10-24T16:27:52.622244Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.622296Z node 2 :STATISTICS DEBUG: 
service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:52.622409Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5081:4583], server id = [2:5085:4587], tablet id = 72075186224037901 2025-10-24T16:27:52.622412Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.622466Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:52.622498Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5082:4584], server id = [2:5086:4588], tablet id = 72075186224037902 2025-10-24T16:27:52.622500Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.622644Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:52.622650Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:52.622685Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:52.622703Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:52.622740Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5093:4594], ActorId: [2:5094:4595], Starting query actor #1 [2:5095:4596] 2025-10-24T16:27:52.622746Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5094:4595], ActorId: [2:5095:4596], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:52.623123Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5080:4582], server id = [2:5084:4586], tablet id = 72075186224037900 2025-10-24T16:27:52.623128Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.623223Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5094:4595], ActorId: [2:5095:4596], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZGIwOWUzZDQtNGY3YzIzNTEtZDQ3OGQzZGYtN2FiMTRiYzQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:52.627757Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5104:4605]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.627806Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:52.627811Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5104:4605], StatRequests.size() = 1 2025-10-24T16:27:52.650465Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5094:4595], ActorId: [2:5095:4596], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGIwOWUzZDQtNGY3YzIzNTEtZDQ3OGQzZGYtN2FiMTRiYzQ=, TxId: 2025-10-24T16:27:52.650486Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5094:4595], ActorId: 
[2:5095:4596], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGIwOWUzZDQtNGY3YzIzNTEtZDQ3OGQzZGYtN2FiMTRiYzQ=, TxId: 2025-10-24T16:27:52.650547Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5093:4594], ActorId: [2:5094:4595], Got response [2:5095:4596] SUCCESS 2025-10-24T16:27:52.650625Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:52.672066Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:52.672086Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:52.755277Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5127:4613]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.755394Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:52.755400Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:52.755432Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:52.755439Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:52.755445Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:52.756501Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 15784302077936868069 12422639840633256089 11924282819443370064 7560627801543920824 268892711531616470 3837457043198474852 6728097301857353495 16295009113629967515 446001690605001494 17630053887588037791 17504886469506087110 8373303009707697878 10146339143798387988 12264707190149593139 7921618009645335262 10162681853685360407 6785055281234738627 8316437400647442252 7403815686825679110 185156723639943720 6989588595861227504 5144384585012792166 2221153977036011132 5910540663658979706 4886423111912943838 13094231717843971246 12542612239662336454 17540700897028097701 17780410738610805191 16811017269220824447 11034337373778708804 17805561809411332263 7245483269700071026 13301701085003800010 13610528554143712196 4659282785379161013 6115008500126698212 11283100127349575381 3687600773086010175 8833134959775222362 10764519495013463364 17430147171903461154 2689282108043936228 15953402708147282602 3957304009075373440 14277704764354734053 16116062537365437753 17271806415232044255 14311939227426583112 4673104235431107263 13651667766032730090 10284303314140407760 17611755306713847390 15135467152342232427 10808333636313026107 15893352743641176264 17787588530744626799 2637466018766484119 1455025709902392458 5826604964860047693 7972306151567229096 18159680241129486209 6566651466341507225 14943648239284895339 6126508614794792062 10623932556008765008 11859691135859516923 4828081536591552648 
16931275833436317198 14525429985310396247 588364476466521408 5325249786108821266 1270446803020924557 14464224255642050449 6333971831348529388 17105738522244226341 4672020024446219715 4634379829311324358 319112136569837517 17506553614197116763 14431346139898535592 9427747724935108891 2018255390861095135 7699341629032885682 8652579613878982472 17139174283421800802 14767199339684490516 15255370812243621422 1737140301418802567 18121329093343024799 >> KqpRm::NodesMembershipByExchanger [GOOD] >> KqpRm::DisonnectNodes [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-10-24T16:27:51.930256Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:51.930431Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:51.930481Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:51.930615Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:51.938100Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:51.939327Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:51.939363Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:51.939581Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:51.939657Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:51.939685Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.939689Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.939712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:51.941769Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:51.941801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:51.941811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:51.941821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.941829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.941836Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.963106Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.963683Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 
2025-10-24T16:27:51.963755Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.964088Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:51.995235Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.995272Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.006061Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.006106Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.006119Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.006130Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.006151Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.006157Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.006162Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.006169Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.016910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.016948Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.027705Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.027762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.028031Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.028042Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.030391Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.030411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.031002Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.031033Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.031446Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/0029ba/r3tmp/tmpki8TvQ/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.031545Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/0029ba/r3tmp/tmpki8TvQ/pdisk_1.dat 2025-10-24T16:27:52.031554Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/0029ba/r3tmp/tmpki8TvQ/pdisk_1.dat 2025-10-24T16:27:52.031769Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.031780Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.031865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.031885Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.031898Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.031933Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.031963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.032257Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.032294Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.042959Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.043009Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 
2025-10-24T16:27:52.045004Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.045153Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029ba/r3tmp/tmpki8TvQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:52.045281Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029ba/r3tmp/tmpki8TvQ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/0029ba/r3tmp/tmpki8TvQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1796682121811048567 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.045450Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.045500Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.045505Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.045534Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.045555Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.046998Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.047024Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.047034Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.047040Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.047048Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.047119Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to 
hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.047126Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.047130Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.047153Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:52.047470Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.047475Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.047478Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.047484Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.047782Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.047788Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.047806Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.047809Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.048015Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.048024Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.048031Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.048034Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.048139Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.048173Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.048177Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.048179Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.061579Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.075037Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.075060Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.075084Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.075091Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-10-24T16:27:52.150075Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-10-24T16:27:52.201198Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-10-24T16:27:52.232025Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-10-24T16:27:52.536882Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.537317Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.537355Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock |60.9%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-10-24T16:27:51.944408Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:51.944571Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:51.944622Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:51.944726Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:51.952706Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:51.953951Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:51.953985Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:51.954233Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:51.954295Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:51.954327Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.954331Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.954355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:51.956116Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:51.956145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:51.956154Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:51.956163Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.956171Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.956177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.977449Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.978000Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:51.978058Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.978390Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.009373Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 
2025-10-24T16:27:52.009414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.019988Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.020016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.020026Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.020036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.020054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.020060Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.020064Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.020070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.030590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.030623Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.041205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.041246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.041431Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.041436Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.042951Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.042962Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.043300Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.043321Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.043554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute 
TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/0029b0/r3tmp/tmpiOSAxz/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.043609Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/0029b0/r3tmp/tmpiOSAxz/pdisk_1.dat 2025-10-24T16:27:52.043614Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/0029b0/r3tmp/tmpiOSAxz/pdisk_1.dat 2025-10-24T16:27:52.043762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.043771Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.043828Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.043843Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.043871Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.043905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.043933Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.044195Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.044245Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.054906Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.054942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.056857Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.056982Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029b0/r3tmp/tmpiOSAxz/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-10-24T16:27:52.057092Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029b0/r3tmp/tmpiOSAxz/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/0029b0/r3tmp/tmpiOSAxz/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5516103305142871974 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.057219Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.057256Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.057261Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.057284Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.057290Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.058534Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.058578Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.058591Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.058601Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.058609Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.058668Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.058674Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.058677Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.058696Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 
2025-10-24T16:27:52.058960Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.058965Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.058967Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.058972Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.059201Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.059206Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.059222Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.059226Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.059382Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.059390Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.059396Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.059398Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.059493Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.059524Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.059528Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.059531Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.072342Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.085681Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.085699Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.085714Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.085718Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-10-24T16:27:52.160278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-10-24T16:27:52.211119Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-10-24T16:27:52.241816Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-10-24T16:27:52.546772Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.547163Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.547194Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> DataShardOutOfOrder::UncommittedReadSetAck >> KqpLimits::KqpMkqlMemoryLimitException >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardReadIteratorConsistency::BrokenWriteLockBeforeIteration [GOOD] >> DataShardReadIteratorConsistency::BrokenWriteLockDuringIteration ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-10-24T16:27:51.965365Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:51.965537Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:51.965593Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:51.965699Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:51.973014Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:51.974347Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:51.974387Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:51.974618Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:51.974682Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:51.974716Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.974721Z node 1 :BS_CONTROLLER DEBUG: 
{BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:51.974747Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:51.976703Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:51.976731Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:51.976740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:51.976750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.976758Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:51.976765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:51.998219Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.998951Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:51.999009Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:51.999310Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:52.030313Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.030348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.040947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:52.040979Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.040988Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:52.040997Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.041011Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:52.041017Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:52.041022Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 
2025-10-24T16:27:52.041028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.051651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:52.051696Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.062226Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:52.062269Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:52.062454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:52.062459Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:52.063834Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:52.063844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:52.064196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:52.064216Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:52.064439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/0029a4/r3tmp/tmpZ8U9pp/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:52.064500Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/0029a4/r3tmp/tmpZ8U9pp/pdisk_1.dat 2025-10-24T16:27:52.064518Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/0029a4/r3tmp/tmpZ8U9pp/pdisk_1.dat 2025-10-24T16:27:52.064667Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:52.064674Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:52.064729Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:52.064746Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.064757Z node 1 
:BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.064786Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.064813Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:52.065114Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.065150Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:52.075741Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:52.075775Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:52.077557Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:52.077683Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029a4/r3tmp/tmpZ8U9pp/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:52.077775Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/0029a4/r3tmp/tmpZ8U9pp/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/0029a4/r3tmp/tmpZ8U9pp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1348035037861350307 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:52.077900Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.077936Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.077940Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:52.077963Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:52.077970Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.079032Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.079059Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.079069Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:52.079076Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:52.079084Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:52.079144Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.079150Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:52.079154Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.079172Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:52.079429Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:52.079434Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-10-24T16:27:52.079436Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:52.079442Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:52.079693Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.079699Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:52.079717Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:52.079719Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:52.079886Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:52.079897Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.079909Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.079913Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.080034Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:52.080068Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:52.080073Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:52.080075Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:52.094678Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:52.107892Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.107910Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:52.107924Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:52.107928Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-10-24T16:27:52.181586Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-10-24T16:27:52.232587Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-10-24T16:27:52.263339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-10-24T16:27:52.568320Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:52.568794Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:52.568844Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:53.079486Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2025-10-24T16:27:53.079525Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2025-10-24T16:27:53.079530Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2025-10-24T16:27:53.079800Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:103:2090] ServerId# [1:401:2301] TabletId# 72057594037932033 PipeClientId# [2:103:2090] 2025-10-24T16:27:53.079867Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:189:2106] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-10-24T16:27:53.079890Z node 2 :LOCAL DEBUG: local.cpp:277: TEvTabletPipe::TEvClientDestroyed {TabletId=72057594046578946 ClientId=[2:433:2119]} 2025-10-24T16:27:53.079895Z node 2 :LOCAL DEBUG: local.cpp:220: TLocalNodeRegistrar HandlePipeDestroyed - DISCONNECTED 2025-10-24T16:27:53.079902Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:53.079920Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:563:2119] 2025-10-24T16:27:53.080561Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:53.080580Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:53.080640Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 
NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-10-24T16:27:53.080663Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:563:2119]} 2025-10-24T16:27:53.080700Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:53.080710Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:53.080713Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:53.091603Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] |60.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |60.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |60.9%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-10-24T16:23:07.052350Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052356Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052359Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-10-24T16:23:07.052553Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-10-24T16:23:07.052559Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052561Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.052592Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009244s 2025-10-24T16:23:07.067914Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-10-24T16:23:07.067933Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.067937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.067960Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006854s 2025-10-24T16:23:07.083916Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-10-24T16:23:07.083935Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.083939Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-10-24T16:23:07.083962Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006929s 2025-10-24T16:23:07.141919Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1761322987141911 2025-10-24T16:23:07.560821Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564824631047683925:2154];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:07.560855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:23:07.576783Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:07.583859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:23:07.584091Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:23:07.584191Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564824630218171101:2274];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:23:07.584221Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00137a/r3tmp/tmpXHBs3K/pdisk_1.dat 2025-10-24T16:23:07.825062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:07.827947Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:23:07.839334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:07.839367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:07.843973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:23:07.844132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:23:07.845630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:07.846895Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:23:07.848082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:23:07.870004Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3183, node 1 2025-10-24T16:23:08.005888Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:08.032699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:23:08.056147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/00137a/r3tmp/yandexjU4lSI.tmp 2025-10-24T16:23:08.056164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/00137a/r3tmp/yandexjU4lSI.tmp 2025-10-24T16:23:08.056241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/00137a/r3tmp/yandexjU4lSI.tmp 2025-10-24T16:23:08.056292Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:23:08.067134Z INFO: TTestServer started on Port 8708 GrpcPort 3183 TClient is connected to server localhost:8708 PQClient connected to localhost:3183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:23:08.224826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-10-24T16:23:08.568245Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:08.582780Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:23:09.453273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824638808105777:2297], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.453326Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.453552Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824638808105804:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.455892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:23:09.454730Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824638808105830:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.454753Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.454793Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564824638808105832:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.454799Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:23:09.488461Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564824638808105806:2301], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-10-24T16:23:09.548723Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564824638808105838:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:23:09.824713Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564824638808105845:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Erro ... :53.120810Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-10-24T16:27:53.120811Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-10-24T16:27:53.120814Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-10-24T16:27:53.120817Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-10-24T16:27:53.120820Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-10-24T16:27:53.120822Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-10-24T16:27:53.120825Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-10-24T16:27:53.120827Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-10-24T16:27:53.120830Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-10-24T16:27:53.120846Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:926: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-10-24T16:27:53.120849Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:968: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-10-24T16:27:53.120852Z node 17 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:27:53.120853Z node 17 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:27:53.120854Z node 17 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:27:53.120858Z node 17 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:27:53.120869Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-10-24T16:27:53.120891Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-10-24T16:27:53.120898Z node 17 
:PERSQUEUE DEBUG: partition_read.cpp:883: [72075186224037892][Partition][0][StateIdle] read cookie 3 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-10-24T16:27:53.120907Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:1097: [72075186224037892][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 1208 count 10 last offset 0, current partition end offset: 10 2025-10-24T16:27:53.120909Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:1123: [72075186224037892][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2025-10-24T16:27:53.120916Z node 17 :PERSQUEUE DEBUG: cache_eviction.h:497: Got data from cache. Partition 0 offset 0 partno 0 count 10 parts_count 0 source 1 size 1208 accessed 0 times before, last time 2025-10-24T16:27:53.000000Z 2025-10-24T16:27:53.120923Z node 17 :PERSQUEUE DEBUG: read.h:126: [72075186224037892][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2025-10-24T16:27:53.120933Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:581: FormAnswer for 1 blobs 2025-10-24T16:27:53.120943Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' 2025-10-24T16:27:53.120977Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:496: FormAnswer processing batch offset 0 totakecount 10 count 10 size 1188 from pos 0 cbcount 10 2025-10-24T16:27:53.120993Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:1018: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1761323273119 queuesize 0 startOffset 0 2025-10-24T16:27:53.121158Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 1 } 2025-10-24T16:27:53.121170Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 1 2025-10-24T16:27:53.121175Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 2 2025-10-24T16:27:53.121178Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 3 2025-10-24T16:27:53.121182Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 4 2025-10-24T16:27:53.121187Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 5 2025-10-24T16:27:53.121191Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: 
acknoledged message 6 2025-10-24T16:27:53.121193Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 7 2025-10-24T16:27:53.121196Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 8 2025-10-24T16:27:53.121200Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 9 2025-10-24T16:27:53.121203Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: acknoledged message 10 2025-10-24T16:27:53.121299Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: close. Timeout = 0 ms 2025-10-24T16:27:53.121309Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session will now close 2025-10-24T16:27:53.121314Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: aborting 2025-10-24T16:27:53.121441Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:27:53.121447Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0] Write session: destroy 2025-10-24T16:27:53.121618Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0 grpc read done: success: 0 data: 2025-10-24T16:27:53.121627Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0 grpc read failed 2025-10-24T16:27:53.121633Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0 grpc closed 2025-10-24T16:27:53.121638Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|deced881-c00dbf56-2372c5ed-b824ccca_0 is DEAD 2025-10-24T16:27:53.121883Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:27:53.121921Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037892] server disconnected, pipe [17:7564825859269385279:2629] destroyed 2025-10-24T16:27:53.121929Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:27:53.121936Z node 17 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:27:53.121938Z node 17 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:27:53.121939Z node 17 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:27:53.192882Z node 17 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:27:53.192902Z node 17 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:27:53.192905Z node 17 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:27:53.293245Z node 17 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:27:53.293266Z node 17 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:27:53.293269Z node 17 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist 2025-10-24T16:27:53.393610Z node 17 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:27:53.393630Z node 17 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:27:53.393633Z node 17 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037892][Partition][0][StateIdle] Try persist >> KqpQuery::Now >> KqpExplain::Explain >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> TraverseColumnShard::TraverseColumnTable [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap >> KqpExplain::ExplainStream >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpStats::RequestUnitForBadRequestExecute >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> KqpQuery::GenericQueryNoRowsLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-10-24T16:27:23.326733Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.352943Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:23.353021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:23.353057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a16/r3tmp/tmpZ4XX1F/pdisk_1.dat 2025-10-24T16:27:23.470426Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.509320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.509368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.550250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5762, node 1 2025-10-24T16:27:23.637404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:23.637430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:23.637435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:23.637624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:23.641369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:23.701347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17283 2025-10-24T16:27:24.087125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:24.995590Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:25.002467Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:25.003307Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:25.056443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:25.056488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:25.088522Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:25.096247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:25.196981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:25.197036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:25.216852Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:25.274423Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.296193Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296376Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296487Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296573Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296601Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296612Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296642Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296664Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.296677Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.370293Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:25.377982Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:25.378032Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:25.388446Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:25.388544Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:25.388581Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:25.388588Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:25.388596Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:25.388604Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:25.388612Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:25.388620Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:25.388798Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:25.391396Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:25.391426Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:25.393829Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:25.393888Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:25.448751Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:25.451364Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:25.455747Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:25.455769Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:25.455798Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:25.458863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:25.460805Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:25.460839Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:25.518253Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:25.628855Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:25.702118Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:25.858277Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:26.718323Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:26.777292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.777329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.777440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2249:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.777449Z node 1 : ... 21: ConnectToSA(), pipe client id = [2:4929:4481] 2025-10-24T16:27:51.653558Z node 2 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [2:4929:4481] 2025-10-24T16:27:51.653669Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4930:4482] 2025-10-24T16:27:51.653707Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4929:4481], server id = [2:4930:4482], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:51.653716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:4930:4482], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:51.653722Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:51.653753Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:51.653764Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:4927:4479], StatRequests.size() = 1 2025-10-24T16:27:51.653806Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:51.670109Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4917:4469], ActorId: [2:4918:4470], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTRhZDQxNjctYTljMjFiYTAtZWViNzMzZTMtOTk2Y2JmNmU=, TxId: 2025-10-24T16:27:51.670132Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4917:4469], ActorId: [2:4918:4470], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTRhZDQxNjctYTljMjFiYTAtZWViNzMzZTMtOTk2Y2JmNmU=, TxId: 2025-10-24T16:27:51.670208Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4916:4468], ActorId: [2:4917:4469], Got response [2:4918:4470] SUCCESS 2025-10-24T16:27:51.670258Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:51.681677Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:51.681696Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:51.744084Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:51.744115Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:51.785648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:4929:4481], schemeshard count = 1 2025-10-24T16:27:53.886444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:53.886470Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-10-24T16:27:53.886480Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:27:53.886484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:53.887893Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:53.899667Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:53.899812Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:53.899826Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:53.900100Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:53.911140Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:53.911204Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:53.911361Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5041:4536], server id = [2:5045:4540], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:53.911446Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5041:4536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.911490Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5042:4537], server id = [2:5046:4541], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:53.911494Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5042:4537], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.911707Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5043:4538], server id = [2:5047:4542], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:53.911714Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5043:4538], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.911784Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5044:4539], server id = [2:5048:4543], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:53.911789Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5044:4539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.912569Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:53.912673Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5041:4536], server id = [2:5045:4540], tablet id = 72075186224037899 2025-10-24T16:27:53.912677Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.912720Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:53.912776Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5042:4537], server id = [2:5046:4541], tablet id = 72075186224037900 2025-10-24T16:27:53.912779Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.912840Z node 2 
:STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:53.912888Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5043:4538], server id = [2:5047:4542], tablet id = 72075186224037901 2025-10-24T16:27:53.912890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.912952Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:53.912956Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:53.912975Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:53.912991Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:53.913030Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5061:4552], ActorId: [2:5062:4553], Starting query actor #1 [2:5063:4554] 2025-10-24T16:27:53.913037Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5062:4553], ActorId: [2:5063:4554], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:53.913471Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5044:4539], server id = [2:5048:4543], tablet id = 72075186224037902 2025-10-24T16:27:53.913477Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.913591Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5062:4553], ActorId: [2:5063:4554], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODdiMTkwN2MtNTZhMjdiNmQtYzYyZDNlZjktMmYzMzQ4ZmY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:53.918215Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5072:4563]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:53.918287Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:53.918294Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5072:4563], StatRequests.size() = 1 2025-10-24T16:27:53.976951Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5062:4553], ActorId: [2:5063:4554], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODdiMTkwN2MtNTZhMjdiNmQtYzYyZDNlZjktMmYzMzQ4ZmY=, TxId: 2025-10-24T16:27:53.976970Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5062:4553], ActorId: [2:5063:4554], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODdiMTkwN2MtNTZhMjdiNmQtYzYyZDNlZjktMmYzMzQ4ZmY=, TxId: 2025-10-24T16:27:53.977032Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5061:4552], ActorId: [2:5062:4553], Got response [2:5063:4554] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:53.977079Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5085:4569]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:53.977150Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:53.977154Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:53.977214Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:53.977382Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:53.977389Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:53.977395Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:53.978311Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-10-24T16:27:21.851187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.872836Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.872903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.872928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a66/r3tmp/tmpgeu6Nx/pdisk_1.dat 2025-10-24T16:27:21.957787Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.990427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.990462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.022909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4413, node 1 2025-10-24T16:27:22.107189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:22.107215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:22.107219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:22.107375Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:22.110731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:22.136357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4340 2025-10-24T16:27:22.499257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.365653Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.369929Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.370926Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.383477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.383502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.407501Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.408333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.498357Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.498399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.511576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.568066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.590177Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.601524Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.601752Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.601921Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.602043Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.602091Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.602111Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.602135Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.602156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.602200Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.669092Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.682753Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.682791Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.691633Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.691662Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1865:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.691928Z node 2 :STATISTICS DEBUG: 
tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.692128Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.692156Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.692162Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.692169Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.692176Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.692182Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.692191Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.692435Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.693393Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1873:2592] 2025-10-24T16:27:23.693451Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1873:2592], schemeshard id = 72075186224037897 2025-10-24T16:27:23.740011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1918:2610] 2025-10-24T16:27:23.740684Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.745931Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.745950Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.745963Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.748731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.750680Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.750712Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.886220Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.993309Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:24.077595Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.984194Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.137510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2221:3055], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.137555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.137674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2239:3060], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.137684Z node 1 :K ... client id = [2:4908:4475], server id = [2:4909:4476], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:51.649815Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:51.649830Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:4906:4473], StatRequests.size() = 1 2025-10-24T16:27:51.649839Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:51.668542Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4896:4463], ActorId: [2:4897:4464], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODY1YTM4ZTUtZTgxOWU0YTctNzc4MWUwYjEtNTcwNGNiNjY=, TxId: 2025-10-24T16:27:51.668568Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4896:4463], ActorId: [2:4897:4464], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODY1YTM4ZTUtZTgxOWU0YTctNzc4MWUwYjEtNTcwNGNiNjY=, TxId: 2025-10-24T16:27:51.668630Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4895:4462], ActorId: [2:4896:4463], Got response [2:4897:4464] SUCCESS 2025-10-24T16:27:51.668739Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:51.680552Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:51.680579Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:51.732653Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:51.732690Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:51.774185Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:4908:4475], schemeshard count = 1 2025-10-24T16:27:53.863184Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:53.863207Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-10-24T16:27:53.863217Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:53.863221Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:53.864393Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:53.876788Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:53.876966Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:53.876984Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:53.877250Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:53.888820Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:53.888906Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:53.889106Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5020:4530], server id = [2:5024:4534], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:53.889222Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5020:4530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.889292Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5021:4531], server id = [2:5025:4535], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:53.889297Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5021:4531], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.889580Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5022:4532], server id = [2:5026:4536], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:53.889588Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5022:4532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.889625Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5023:4533], server id = [2:5027:4537], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:53.889629Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5023:4533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:53.890485Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:53.890563Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5020:4530], server id = [2:5024:4534], tablet id = 72075186224037899 2025-10-24T16:27:53.890567Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.890855Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:53.890913Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:53.890956Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5021:4531], server id = [2:5025:4535], tablet id = 72075186224037900 2025-10-24T16:27:53.890959Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.890999Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5023:4533], server id = [2:5027:4537], tablet id = 72075186224037902 2025-10-24T16:27:53.891001Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.891027Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:53.891031Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:53.891055Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-10-24T16:27:53.891075Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:53.891127Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5040:4546], ActorId: [2:5041:4547], Starting query actor #1 [2:5042:4548] 2025-10-24T16:27:53.891136Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5041:4547], ActorId: [2:5042:4548], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:53.891602Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5022:4532], server id = [2:5026:4536], tablet id = 72075186224037901 2025-10-24T16:27:53.891612Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.891819Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5041:4547], ActorId: [2:5042:4548], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZjNjOGM2ZWMtZTg4MGQ1NGYtOTQ5OGQ3YWQtYmIyYjFjOTA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:53.902471Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5051:4557]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:53.902524Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:53.902532Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5051:4557], StatRequests.size() = 1 2025-10-24T16:27:53.959888Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5041:4547], ActorId: [2:5042:4548], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjNjOGM2ZWMtZTg4MGQ1NGYtOTQ5OGQ3YWQtYmIyYjFjOTA=, TxId: 2025-10-24T16:27:53.959911Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5041:4547], ActorId: [2:5042:4548], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjNjOGM2ZWMtZTg4MGQ1NGYtOTQ5OGQ3YWQtYmIyYjFjOTA=, TxId: 2025-10-24T16:27:53.959984Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5040:4546], ActorId: [2:5041:4547], Got response [2:5042:4548] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:53.960145Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:53.960542Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2319:2833];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2025-10-24T16:27:53.977214Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:53.977238Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-10-24T16:27:53.986589Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:5071:4567];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; 2025-10-24T16:27:54.139704Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5182:4660]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:54.139836Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:54.139843Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:54.139915Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:54.139923Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:54.139929Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:54.141169Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpTypes::QuerySpecialTypes >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpQuery::Now [GOOD] >> KqpQuery::RewriteIfPresentToMap >> KqpQuery::NoEvaluate >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-10-24T16:27:21.930820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.952923Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.952993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.953025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a4d/r3tmp/tmppdnh1b/pdisk_1.dat 2025-10-24T16:27:22.029896Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.062579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.062620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.096094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24884, node 1 2025-10-24T16:27:22.178785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:22.178803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:22.178806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:22.178925Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:22.181255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:22.205480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21393 2025-10-24T16:27:22.557718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.421468Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.423977Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.424801Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.435042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.435071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.460752Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.461281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.556617Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.556662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.568534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.622805Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.633119Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.655546Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.655731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.655931Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.656017Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.656079Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.656099Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.656120Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.656152Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.656195Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.749382Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.767590Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.767626Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.776909Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.776941Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1865:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.777174Z node 2 :STATISTICS DEBUG: 
tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.777244Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.777272Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.777279Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.777286Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.777293Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.777299Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.777307Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.777473Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.779042Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1876:2594] 2025-10-24T16:27:23.779114Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1876:2594], schemeshard id = 72075186224037897 2025-10-24T16:27:23.832915Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1920:2612] 2025-10-24T16:27:23.834876Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.839660Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.839689Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.839702Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.842004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.844092Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.844129Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:24.005419Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:24.072104Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:24.199336Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:25.111720Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.170390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2225:3056], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.170437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.170564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.170574Z node 1 ... e 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:54.416476Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:54.416510Z node 2 :STATISTICS DEBUG: tx_init.cpp:55: [72075186224037894] Loaded database: /Root/Database 2025-10-24T16:27:54.416516Z node 2 :STATISTICS DEBUG: tx_init.cpp:59: [72075186224037894] Loaded traversal start key 2025-10-24T16:27:54.416521Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-10-24T16:27:54.416524Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 4 2025-10-24T16:27:54.416527Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1761323274350636 2025-10-24T16:27:54.416530Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-10-24T16:27:54.416533Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2025-10-24T16:27:54.416536Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-10-24T16:27:54.416542Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-10-24T16:27:54.416546Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:54.416554Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-10-24T16:27:54.416559Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:54.416562Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:54.416569Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:54.416589Z node 2 :STATISTICS DEBUG: tx_init.cpp:303: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:54.416812Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:54.416933Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:54.416942Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:54.416957Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:54.417204Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:54.417217Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:54.417447Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:54.429364Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5104:4583] 2025-10-24T16:27:54.429404Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5060:4555], server id = [2:5104:4583], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:54.429440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:5104:4583], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-10-24T16:27:54.429487Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5105:4584] 2025-10-24T16:27:54.429503Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5105:4584], schemeshard id = 72075186224037897 2025-10-24T16:27:54.472237Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:54.472298Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:54.472575Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5110:4589], server id = [2:5114:4593], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:54.472600Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5110:4589], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.472625Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5111:4590], server id = [2:5115:4594], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:54.472630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5111:4590], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.472852Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5112:4591], server id = [2:5117:4596], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:54.472866Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5112:4591], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.472900Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5113:4592], server id = [2:5116:4595], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:54.472908Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5113:4592], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.473271Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:54.473418Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5110:4589], server id = [2:5114:4593], tablet id = 72075186224037899 2025-10-24T16:27:54.473425Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.473463Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:54.473509Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:54.473555Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received 
TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:54.473563Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:54.473641Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5111:4590], server id = [2:5115:4594], tablet id = 72075186224037900 2025-10-24T16:27:54.473648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.473680Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:54.473717Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:54.473775Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5122:4601], ActorId: [2:5123:4602], Starting query actor #1 [2:5124:4603] 2025-10-24T16:27:54.473787Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5123:4602], ActorId: [2:5124:4603], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:54.474363Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5113:4592], server id = [2:5116:4595], tablet id = 72075186224037902 2025-10-24T16:27:54.474374Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.474487Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5112:4591], server id = [2:5117:4596], tablet id = 72075186224037901 2025-10-24T16:27:54.474492Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.474564Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5123:4602], ActorId: [2:5124:4603], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZmQxOTA3NmQtNTBkNGEwNGItYjc5MWJlOTctZmQ5NmQ2MDE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:54.480179Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5133:4612]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:54.480235Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:54.480240Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5133:4612], StatRequests.size() = 1 2025-10-24T16:27:54.507007Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5123:4602], ActorId: [2:5124:4603], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmQxOTA3NmQtNTBkNGEwNGItYjc5MWJlOTctZmQ5NmQ2MDE=, TxId: 2025-10-24T16:27:54.507035Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5123:4602], ActorId: [2:5124:4603], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmQxOTA3NmQtNTBkNGEwNGItYjc5MWJlOTctZmQ5NmQ2MDE=, TxId: 2025-10-24T16:27:54.507128Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5122:4601], ActorId: [2:5123:4602], Got response [2:5124:4603] 
SUCCESS 2025-10-24T16:27:54.507234Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:54.518895Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:54.518917Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:54.572329Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5152:4620]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:54.572458Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:54.572468Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:54.572522Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:54.572532Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:54.572541Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:54.573873Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpExplain::Explain [GOOD] >> KqpExplain::ExplainDataQuery >> DataShardReadIteratorConsistency::BrokenWriteLockDuringIteration [GOOD] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRetryAndRestart >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-10-24T16:27:53.755030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:53.777877Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:53.777952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:53.777962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001885/r3tmp/tmplnfTPU/pdisk_1.dat 2025-10-24T16:27:53.841513Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:53.842536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:53.842571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:53.842689Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323273475063 != 1761323273475066 2025-10-24T16:27:53.873718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:53.919057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:53.951759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.035702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.051121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:54.051365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:54.051432Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:54.051513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:54.052720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:54.061101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:54.061138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:54.061306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:54.061317Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:54.061325Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:54.061397Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:54.061423Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:54.061438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:54.071717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:54.076987Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:54.077055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:54.077079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:54.077085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:54.077090Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:54.077096Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:54.077156Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.077164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.077253Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:54.077271Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:54.077290Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:54.077299Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:54.077306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:54.077311Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:54.077316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:54.077321Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:54.077326Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:54.077339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:54.077345Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:54.077352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:54.077445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:54.077453Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:54.077471Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:54.077526Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:54.077538Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:54.077557Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:54.077565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:54.077570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:54.077575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:54.077580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:54.077672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:54.077678Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:54.077683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:54.077687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:54.077698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:54.077702Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:54.077708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:54.077713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:54.077718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:54.078020Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:27:54.078030Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:27:54.088290Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:54.088320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:54.088329Z n ... 948350Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-10-24T16:27:55.266236Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715665. Ctx: { TraceId: 01k8bgrmxx7jg4ghvnjr6g6pnc, Database: , SessionId: ydb://session/3?node_id=1&id=OGZmZTJmMjUtNTM2MmY0YjItNDQzMjNmMGYtMTQzM2ZmYjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:27:55.267421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [1:1085:2844], Recipient [1:675:2566]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-10-24T16:27:55.267506Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:27:55.267526Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-10-24T16:27:55.267548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:27:55.267554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:27:55.267561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:55.267566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:55.267582Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-10-24T16:27:55.267588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:27:55.267592Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:55.267597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to 
execution unit ExecuteRead 2025-10-24T16:27:55.267601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:27:55.267636Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-10-24T16:27:55.267705Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:27:55.267713Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-10-24T16:27:55.267720Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[1:1085:2844], 0} after executionsCount# 1 2025-10-24T16:27:55.267729Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[1:1085:2844], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:55.267746Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[1:1085:2844], 0} finished in read 2025-10-24T16:27:55.267762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:27:55.267769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:27:55.267773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:55.267777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:55.267789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:27:55.267793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:55.267797Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037888 has finished 2025-10-24T16:27:55.267802Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:27:55.267823Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:27:55.267939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [1:69:2116], Recipient [1:675:2566]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-10-24T16:27:55.267969Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [1:1087:2845], Recipient [1:760:2626]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 
SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-10-24T16:27:55.267993Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-10-24T16:27:55.268001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-10-24T16:27:55.268010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:27:55.268014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-10-24T16:27:55.268019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:55.268023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-10-24T16:27:55.268031Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-10-24T16:27:55.268038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:27:55.268041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:55.268045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-10-24T16:27:55.268049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-10-24T16:27:55.268063Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-10-24T16:27:55.268094Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-10-24T16:27:55.268099Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-10-24T16:27:55.268104Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[1:1087:2845], 0} after executionsCount# 1 2025-10-24T16:27:55.268109Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[1:1087:2845], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:27:55.268119Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[1:1087:2845], 0} finished in read 2025-10-24T16:27:55.268127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 
2025-10-24T16:27:55.268130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-10-24T16:27:55.268134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-10-24T16:27:55.268138Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-10-24T16:27:55.268144Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:27:55.268148Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-10-24T16:27:55.268152Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037889 has finished 2025-10-24T16:27:55.268155Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-10-24T16:27:55.268168Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-10-24T16:27:55.268214Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [1:69:2116], Recipient [1:760:2626]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-10-24T16:27:55.268444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [1:1085:2844], Recipient [1:675:2566]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:55.268454Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:27:55.268786Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [1:1087:2845], Recipient [1:760:2626]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:27:55.268798Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> KqpLimits::OutOfSpaceBulkUpsertFail >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> KqpParams::DefaultParameterValue ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2025-10-24T16:27:21.636300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.667713Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.667785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.667831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a93/r3tmp/tmp8rhuEf/pdisk_1.dat 2025-10-24T16:27:21.751179Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.783113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.783141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.815277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11729, node 1 2025-10-24T16:27:21.897558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.897574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.897579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.897728Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.900847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.925098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12272 2025-10-24T16:27:22.285507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.158622Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.161578Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.162531Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.172834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.172872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.196655Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.197285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.288149Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.288194Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.300744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.356708Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.378390Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378580Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378738Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378847Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378884Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378900Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378936Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378963Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.378980Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.454008Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.460950Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.460983Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.476455Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.476538Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.476568Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.476575Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.476582Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.476588Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.476595Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.476602Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.476714Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.479518Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.479551Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.481803Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:23.481861Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:23.532108Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:23.533456Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.538062Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.538087Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.538103Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.541460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.543277Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.543317Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.610159Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.684665Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.746456Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.844179Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.704434Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.753567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.753613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.753740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2249:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.753750Z node 1 ... ActorId: [2:4125:3660], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:52.234199Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4124:3659], ActorId: [2:4125:3660], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OTkzODhiMzctOWU1NGJkYWQtNTNlOTRhY2YtZGUxZmI3ZDI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:52.236698Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4124:3659], ActorId: [2:4125:3660], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTkzODhiMzctOWU1NGJkYWQtNTNlOTRhY2YtZGUxZmI3ZDI=, TxId: 2025-10-24T16:27:52.236717Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4124:3659], ActorId: [2:4125:3660], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTkzODhiMzctOWU1NGJkYWQtNTNlOTRhY2YtZGUxZmI3ZDI=, TxId: 2025-10-24T16:27:52.236787Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4123:3658], ActorId: [2:4124:3659], Got response [2:4125:3660] SUCCESS 2025-10-24T16:27:52.236832Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:52.248147Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:52.248164Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:47: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3091:3308] 2025-10-24T16:27:52.975558Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:52.975587Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-10-24T16:27:52.975592Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:670: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:27:53.642266Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-10-24T16:27:53.642396Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 7 2025-10-24T16:27:53.642471Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 0 2025-10-24T16:27:53.642492Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-10-24T16:27:53.652925Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:53.652960Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:53.653035Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2025-10-24T16:27:53.664230Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:53.674670Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:53.674711Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-10-24T16:27:53.674719Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:53.674814Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4212:3702], ActorId: [2:4213:3703], Starting query actor #1 [2:4214:3704] 2025-10-24T16:27:53.674826Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4213:3703], ActorId: [2:4214:3704], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:53.675418Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4213:3703], ActorId: [2:4214:3704], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Zjg1M2I5YzctZmFkZmQwOWQtMjhmZGQyOTEtNmQ2YTQ0OTU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:53.677706Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4213:3703], ActorId: [2:4214:3704], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Zjg1M2I5YzctZmFkZmQwOWQtMjhmZGQyOTEtNmQ2YTQ0OTU=, TxId: 2025-10-24T16:27:53.677722Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4213:3703], ActorId: [2:4214:3704], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Zjg1M2I5YzctZmFkZmQwOWQtMjhmZGQyOTEtNmQ2YTQ0OTU=, TxId: 2025-10-24T16:27:53.677773Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4212:3702], ActorId: [2:4213:3703], Got response [2:4214:3704] SUCCESS 2025-10-24T16:27:53.677818Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:53.689221Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:53.689244Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:54.399473Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:54.399501Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:54.399507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:55.108541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:55.108622Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-10-24T16:27:55.108632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:27:55.108772Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4295:3742], ActorId: [2:4296:3743], Starting query actor #1 [2:4297:3744] 2025-10-24T16:27:55.108786Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4296:3743], ActorId: [2:4297:3744], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:55.109693Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4296:3743], ActorId: [2:4297:3744], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDllZDI0NTMtMzM4NzQwN2YtZGViOTRlN2YtMjM0YTBlOWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:55.114981Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4296:3743], ActorId: [2:4297:3744], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDllZDI0NTMtMzM4NzQwN2YtZGViOTRlN2YtMjM0YTBlOWQ=, TxId: 2025-10-24T16:27:55.115015Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4296:3743], ActorId: [2:4297:3744], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDllZDI0NTMtMzM4NzQwN2YtZGViOTRlN2YtMjM0YTBlOWQ=, TxId: 2025-10-24T16:27:55.115122Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4295:3742], ActorId: [2:4296:3743], Got response [2:4297:3744] SUCCESS 2025-10-24T16:27:55.115228Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:55.126832Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:27:55.126855Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3091:3308] 2025-10-24T16:27:55.127004Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4320:3758]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:55.127655Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:55.127664Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:55.127769Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:55.127777Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:55.127784Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:55.128422Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:55.128528Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 2 2025-10-24T16:27:55.128809Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4343:3769]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:55.129334Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:55.129343Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, 
LocalPathId: 2] ] 2025-10-24T16:27:55.129394Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:55.129400Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:55.129404Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:55.129709Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-10-24T16:27:55.129754Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> KqpExplain::UpdateSecondaryConditional+UseSink >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] >> KqpParams::ImplicitParameterTypes >> KqpRm::SnapshotSharingByExchanger [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRetryAndRestart [GOOD] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRestartWithStateMigrationRetryAndRestartWithoutStateMigration >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::MixedCreateAsSelect >> KqpStats::MultiTxStatsFullExpYql ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-10-24T16:27:52.955949Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-10-24T16:27:52.956112Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-10-24T16:27:52.956167Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-10-24T16:27:52.956274Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-10-24T16:27:52.964384Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:52.965629Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:52.965663Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-10-24T16:27:52.965921Z node 1 
:BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:52.965986Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-10-24T16:27:52.966022Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.966027Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-10-24T16:27:52.966053Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-10-24T16:27:52.967863Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-10-24T16:27:52.967892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-10-24T16:27:52.967902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-10-24T16:27:52.967922Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.967957Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-10-24T16:27:52.967969Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:52.989316Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.990097Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-10-24T16:27:52.990173Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:27:52.990489Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-10-24T16:27:53.021779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-10-24T16:27:53.021826Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:53.032557Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-10-24T16:27:53.032603Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:53.032616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-10-24T16:27:53.032628Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:53.032649Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-10-24T16:27:53.032655Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:53.032660Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-10-24T16:27:53.032666Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:53.043403Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-10-24T16:27:53.043451Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:53.054220Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-10-24T16:27:53.054281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-10-24T16:27:53.054461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-10-24T16:27:53.054466Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-10-24T16:27:53.055899Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-10-24T16:27:53.055912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-10-24T16:27:53.056315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-10-24T16:27:53.056335Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-10-24T16:27:53.056576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/fxx3/0028fd/r3tmp/tmp3d5JQ6/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-10-24T16:27:53.056655Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/fxx3/0028fd/r3tmp/tmp3d5JQ6/pdisk_1.dat 2025-10-24T16:27:53.056660Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/fxx3/0028fd/r3tmp/tmp3d5JQ6/pdisk_1.dat 2025-10-24T16:27:53.056817Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-10-24T16:27:53.056824Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-10-24T16:27:53.056875Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:27:53.056891Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:53.056903Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:53.056930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:27:53.056958Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-10-24T16:27:53.057248Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:53.057282Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-10-24T16:27:53.068146Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-10-24T16:27:53.068197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:27:53.070009Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-10-24T16:27:53.070145Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/fxx3/0028fd/r3tmp/tmp3d5JQ6/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-10-24T16:27:53.070283Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/fxx3/0028fd/r3tmp/tmp3d5JQ6/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/fxx3/0028fd/r3tmp/tmp3d5JQ6/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 899143082271880056 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-10-24T16:27:53.070438Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:53.070477Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:53.070481Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-10-24T16:27:53.070506Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-10-24T16:27:53.070512Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:53.071616Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:53.071655Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:53.071666Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-10-24T16:27:53.071675Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-10-24T16:27:53.071682Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-10-24T16:27:53.071747Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:53.071753Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-10-24T16:27:53.071756Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:53.071779Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:431:2318] 2025-10-24T16:27:53.072073Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-10-24T16:27:53.072078Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-10-24T16:27:53.072080Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-10-24T16:27:53.072086Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:433:2119] 2025-10-24T16:27:53.072326Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:53.072330Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:420:2314] 2025-10-24T16:27:53.072347Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-10-24T16:27:53.072350Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:421:2115] 2025-10-24T16:27:53.072538Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:431:2318]} 2025-10-24T16:27:53.072545Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:53.072552Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:53.072555Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:53.072658Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:433:2119]} 2025-10-24T16:27:53.072692Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-10-24T16:27:53.072696Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-10-24T16:27:53.072698Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-10-24T16:27:53.086048Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-10-24T16:27:53.099253Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:53.099273Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-10-24T16:27:53.099287Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-10-24T16:27:53.099291Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-10-24T16:27:53.173410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-10-24T16:27:53.224673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-10-24T16:27:53.255571Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-10-24T16:27:53.561433Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-10-24T16:27:53.562117Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-10-24T16:27:53.562171Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-10-24T16:27:54.288023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:54.312761Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:54.312824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:54.312830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001875/r3tmp/tmpT50Cri/pdisk_1.dat 2025-10-24T16:27:54.377339Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.378138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.378166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.378259Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323274011794 != 1761323274011797 2025-10-24T16:27:54.409172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.455755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:54.488412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.571602Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-10-24T16:27:54.571643Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-10-24T16:27:54.571667Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-10-24T16:27:54.592179Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1673: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-10-24T16:27:54.592218Z node 1 :TX_PROXY DEBUG: schemereq.cpp:613: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-10-24T16:27:54.592464Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1738: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-10-24T16:27:54.592490Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1728: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-10-24T16:27:54.592556Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1561: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-10-24T16:27:54.592610Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1608: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-10-24T16:27:54.592625Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-10-24T16:27:54.593155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.593280Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1463: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-10-24T16:27:54.593429Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1485: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-10-24T16:27:54.593441Z node 1 :TX_PROXY DEBUG: schemereq.cpp:593: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-10-24T16:27:54.608833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:680:2569]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:54.609153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:680:2569]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:54.609227Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:680:2569] 2025-10-24T16:27:54.609294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:54.611983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:667:2561], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:54.621146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:680:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:54.621254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:667:2561], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:54.621390Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:683:2571] 2025-10-24T16:27:54.621433Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:54.622804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:667:2561], Recipient [1:683:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:54.622922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: 
TxInitSchema.Complete 2025-10-24T16:27:54.622946Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:54.623098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:54.623108Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:54.623115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:54.623187Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:54.623217Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:54.623230Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:712:2569] in generation 1 2025-10-24T16:27:54.623299Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:54.623316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:54.623421Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:27:54.623428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:27:54.623434Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:27:54.623461Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:54.623473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:54.623481Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:713:2571] in generation 1 2025-10-24T16:27:54.633802Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:54.646152Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:54.646247Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:54.646276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:716:2590] 2025-10-24T16:27:54.646282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:54.646288Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:54.646295Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:54.646332Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:54.646342Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:27:54.646354Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:54.646363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:717:2591] 
2025-10-24T16:27:54.646367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:27:54.646371Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:27:54.646375Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:27:54.646439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:680:2569], Recipient [1:680:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.646447Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.646460Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Send ... ecutedNoMoreRestarts 2025-10-24T16:27:56.392151Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-10-24T16:27:56.392155Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-10-24T16:27:56.392160Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:27:56.392179Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:56.392183Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-10-24T16:27:56.392187Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:27:56.392191Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:27:56.392198Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-10-24T16:27:56.392202Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:27:56.392206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037888 has finished 2025-10-24T16:27:56.402419Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-10-24T16:27:56.402447Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:27:56.402457Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-10-24T16:27:56.402482Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:56.403455Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:67:2114] Handle TEvProposeTransaction 2025-10-24T16:27:56.403469Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:67:2114] TxId# 281474976710661 ProcessProposeTransaction 2025-10-24T16:27:56.403480Z node 2 :TX_PROXY DEBUG: 
proxy_impl.cpp:272: actor# [2:67:2114] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [2:863:2681] DataReq marker# P0 2025-10-24T16:27:56.403504Z node 2 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [2:863:2681] Cookie# 0 txid# 281474976710661 HANDLE TDataReq marker# P1 2025-10-24T16:27:56.403568Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:863:2681] txid# 281474976710661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-10-24T16:27:56.403605Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:863:2681] txid# 281474976710661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-10-24T16:27:56.403643Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:863:2681] txid# 281474976710661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-10-24T16:27:56.403703Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [2:863:2681], Recipient [2:675:2566]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 863 RawX2: 8589937273 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t_\003\000\000\000\000\000\000\021y\n\000\000\002\000\000\000" TxId: 281474976710661 ExecLevel: 0 Flags: 8 2025-10-24T16:27:56.403709Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:56.403734Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:56.403773Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-10-24T16:27:56.403787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit CheckDataTx 2025-10-24T16:27:56.403797Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-10-24T16:27:56.403802Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit CheckDataTx 2025-10-24T16:27:56.403806Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:56.403810Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:27:56.403819Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-10-24T16:27:56.403829Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976710661] at 72075186224037888 2025-10-24T16:27:56.403834Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-10-24T16:27:56.403838Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2025-10-24T16:27:56.403845Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-10-24T16:27:56.403865Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit MakeScanSnapshot 2025-10-24T16:27:56.403870Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-10-24T16:27:56.403873Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-10-24T16:27:56.403877Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-10-24T16:27:56.403881Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit WaitForStreamClearance 2025-10-24T16:27:56.403890Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:863:2681] for [0:281474976710661] at 72075186224037888 2025-10-24T16:27:56.403894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Continue 2025-10-24T16:27:56.403906Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:56.403925Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976710661 2025-10-24T16:27:56.403937Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976710661 2025-10-24T16:27:56.403943Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976710661, cleared: 1 2025-10-24T16:27:56.403967Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287942, Sender [2:863:2681], Recipient [2:675:2566]: NKikimrTx.TEvStreamClearancePending TxId: 281474976710661 2025-10-24T16:27:56.403973Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-10-24T16:27:56.403988Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287940, Sender [2:863:2681], Recipient [2:675:2566]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976710661 Cleared: true 2025-10-24T16:27:56.403993Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-10-24T16:27:56.404005Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:675:2566], Recipient [2:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:56.404008Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:56.404016Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:56.404023Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:27:56.404028Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 
72075186224037888 for WaitForStreamClearance 2025-10-24T16:27:56.404031Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit WaitForStreamClearance 2025-10-24T16:27:56.404037Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976710661] at 72075186224037888 2025-10-24T16:27:56.404043Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-10-24T16:27:56.404048Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-10-24T16:27:56.404053Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit ReadTableScan 2025-10-24T16:27:56.404057Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit ReadTableScan 2025-10-24T16:27:56.404100Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Continue 2025-10-24T16:27:56.404104Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-10-24T16:27:56.404108Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-10-24T16:27:56.404112Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:56.404115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:56.404125Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:27:56.404231Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435082, Sender [2:869:2686], Recipient [2:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-10-24T16:27:56.404236Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-10-24T16:27:28.734699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:28.766720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:28.769657Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:28.769726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:28.769753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0029fa/r3tmp/tmpJWylJK/pdisk_1.dat 2025-10-24T16:27:28.877863Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:28.913950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:28.913995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:28.950215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12707, node 1 2025-10-24T16:27:29.030829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:29.030862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:29.030867Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:29.031056Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:29.048597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:29.096465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13640 2025-10-24T16:27:29.495260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:30.381241Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:30.383725Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:30.394139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:30.394185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:30.427894Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:30.428484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:30.523509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:30.523556Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:30.524005Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524125Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524277Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524447Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524505Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524570Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524599Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524617Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.524643Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:30.536478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:30.586783Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:30.592943Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:30.592978Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:30.601323Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:30.601406Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:30.601435Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:30.601440Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:30.601444Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:30.601449Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:30.601453Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:30.601459Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:30.601572Z node 2 
:STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:30.601765Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1611:2450] 2025-10-24T16:27:30.602204Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:30.603866Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:30.603882Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:30.603893Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:30.607780Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1839:2590] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:30.608495Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:30.620852Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:30.620894Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1905:2626], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:30.623024Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1942:2648] 2025-10-24T16:27:30.623200Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1942:2648], schemeshard id = 72075186224037897 2025-10-24T16:27:30.690409Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:30.712324Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:30.734009Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:30.892440Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:30.894312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:30.895609Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:30.895638Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. 
Subscribe on create table tx: 281474976720658 2025-10-24T16:27:30.983918Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:31.182585Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:31.594270Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:31.626432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2226:3060], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:31.626477Z ... 6:27:52.980730Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4124:3810], server id = [2:4125:3811], tablet id = 72075186224037899 2025-10-24T16:27:52.980737Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:52.980825Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4130:3815], ActorId: [2:4131:3816], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Mzk0ZDhiYzUtOWY2YzUzZjUtMmQyOTJmZGEtYTdjMmRjOGE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:52.985522Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4140:3825]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:52.985593Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:52.985599Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4140:3825], StatRequests.size() = 1 2025-10-24T16:27:53.029987Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4130:3815], ActorId: [2:4131:3816], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mzk0ZDhiYzUtOWY2YzUzZjUtMmQyOTJmZGEtYTdjMmRjOGE=, TxId: 2025-10-24T16:27:53.030012Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4130:3815], ActorId: [2:4131:3816], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mzk0ZDhiYzUtOWY2YzUzZjUtMmQyOTJmZGEtYTdjMmRjOGE=, TxId: 2025-10-24T16:27:53.030071Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4129:3814], ActorId: [2:4130:3815], Got response [2:4131:3816] SUCCESS 2025-10-24T16:27:53.030144Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:53.041481Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:53.041503Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:53.105080Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4168:3833] 2025-10-24T16:27:53.105273Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3109:3332] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-10-24T16:27:53.105284Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:38: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:3109:3332] 2025-10-24T16:27:53.105299Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:100: [72075186224037894] TTxAnalyze::Complete 2025-10-24T16:27:53.344191Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:53.344218Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:53.662987Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:53.663034Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:53.663326Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:53.675249Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:53.675426Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:53.675442Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:51: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-10-24T16:27:53.687059Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:54.514547Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:54.514581Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:707: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:54.514587Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:54.514655Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:27:54.514799Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:54.514828Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:54.526206Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-10-24T16:27:55.307714Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:55.307758Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:55.307993Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 51, entries count: 2, are all stats full: 1 2025-10-24T16:27:55.319593Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:55.361156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:55.361184Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=operationId 2025-10-24T16:27:55.361188Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:56.074328Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:56.074385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:56.074391Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:56.074588Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:56.085912Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:56.086023Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:56.086037Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:56.086150Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:56.097696Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:56.097779Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:56.097977Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4303:3904], server id = [2:4304:3905], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:56.098015Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4303:3904], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:56.098384Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:56.098403Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:56.098441Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:56.098474Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:56.098579Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4306:3907], ActorId: [2:4307:3908], Starting query actor #1 [2:4308:3909] 2025-10-24T16:27:56.098594Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4307:3908], ActorId: [2:4308:3909], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:56.099312Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4303:3904], server id = [2:4304:3905], tablet id = 72075186224037899 2025-10-24T16:27:56.099324Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:56.099486Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4307:3908], ActorId: [2:4308:3909], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YmEyMzIzMDAtNWYwZGYyOWUtOWNlMTNhZjEtZWY1Zjg1M2M=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:56.113873Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4307:3908], ActorId: [2:4308:3909], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmEyMzIzMDAtNWYwZGYyOWUtOWNlMTNhZjEtZWY1Zjg1M2M=, TxId: 2025-10-24T16:27:56.113907Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4307:3908], ActorId: [2:4308:3909], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmEyMzIzMDAtNWYwZGYyOWUtOWNlMTNhZjEtZWY1Zjg1M2M=, TxId: 2025-10-24T16:27:56.114019Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4306:3907], ActorId: [2:4307:3908], Got response [2:4308:3909] SUCCESS 2025-10-24T16:27:56.114129Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:56.147941Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:56.147973Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3109:3332] >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] |60.9%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpExplain::CreateTableAs+Stats >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpExplain::UpdateSecondaryConditional+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-10-24T16:27:21.032990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.068730Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.068797Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.068822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ab9/r3tmp/tmpKJiwbj/pdisk_1.dat 2025-10-24T16:27:21.159976Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.192515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.192554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.225310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10844, node 1 2025-10-24T16:27:21.305041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.305057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.305060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.305175Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.308494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.353953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27723 2025-10-24T16:27:21.673874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.514609Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.516921Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.536693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.536734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.585042Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.585584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.679508Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.679547Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.679976Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680157Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680288Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680482Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680507Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680564Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680615Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.680640Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.692210Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.752830Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.763699Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.763744Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.770890Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.771073Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.771098Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.771104Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.771110Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.771116Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.771122Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:22.771131Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.771281Z node 2 
:STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.771552Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1611:2452] 2025-10-24T16:27:22.773190Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-10-24T16:27:22.774840Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.774857Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.774868Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:22.780855Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.780886Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1846:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.783753Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1864:2607] 2025-10-24T16:27:22.784395Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1864:2607], schemeshard id = 72075186224037897 2025-10-24T16:27:22.789592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.792737Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.792777Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:22.856817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.869412Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-10-24T16:27:22.931001Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.072843Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.204406Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:23.924185Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.949934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:24.571108Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:24.604281Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-10-24T16:27:24.604305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8270: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-10-24T16:27:24.604317Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:2544:2916], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186 ... count = 0, need schemeshards count = 1 2025-10-24T16:27:54.331861Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:54.331898Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:54.331911Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:5413:4698], StatRequests.size() = 1 2025-10-24T16:27:54.331961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:54.353185Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5403:4688], ActorId: [2:5404:4689], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2Q1Y2M0YzgtNjRjYzYyYTktMmMyZWFmZDMtYWY3MzhlNDQ=, TxId: 2025-10-24T16:27:54.353210Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5403:4688], ActorId: [2:5404:4689], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2Q1Y2M0YzgtNjRjYzYyYTktMmMyZWFmZDMtYWY3MzhlNDQ=, TxId: 2025-10-24T16:27:54.353286Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5402:4687], ActorId: [2:5403:4688], Got response [2:5404:4689] SUCCESS 2025-10-24T16:27:54.353402Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:54.364971Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:54.364987Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-10-24T16:27:54.480209Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:54.480248Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:54.554041Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:5415:4700], schemeshard count = 1 2025-10-24T16:27:54.975249Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-10-24T16:27:54.975290Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.992000s, at schemeshard: 72075186224037899 2025-10-24T16:27:54.975381Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 28, entries count: 1, are all stats full: 1 2025-10-24T16:27:54.987256Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:56.657678Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:56.657712Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-10-24T16:27:56.657724Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-10-24T16:27:56.657729Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-10-24T16:27:56.658850Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:56.672664Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:56.672909Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:56.672936Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:56.673334Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:56.685458Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:56.685565Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:56.685854Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5535:4763], server id = [2:5539:4767], tablet id = 72075186224037905, status = OK 2025-10-24T16:27:56.686038Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5535:4763], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-10-24T16:27:56.686166Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5536:4764], server id = [2:5540:4768], tablet id = 72075186224037906, status = OK 2025-10-24T16:27:56.686179Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5536:4764], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-10-24T16:27:56.686468Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5537:4765], server id = [2:5541:4769], tablet id = 72075186224037907, status = OK 2025-10-24T16:27:56.686483Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5537:4765], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-10-24T16:27:56.686711Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5538:4766], server id = [2:5542:4770], tablet id = 72075186224037908, status = OK 2025-10-24T16:27:56.686724Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5538:4766], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-10-24T16:27:56.688991Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-10-24T16:27:56.689119Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5535:4763], server id = [2:5539:4767], tablet id = 72075186224037905 2025-10-24T16:27:56.689128Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:56.689294Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-10-24T16:27:56.689437Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5536:4764], server id = [2:5540:4768], tablet id = 72075186224037906 2025-10-24T16:27:56.689443Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:56.689584Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-10-24T16:27:56.689660Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5537:4765], server id = [2:5541:4769], tablet id = 72075186224037907 2025-10-24T16:27:56.689665Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:56.689755Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-10-24T16:27:56.689762Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:56.689792Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-10-24T16:27:56.689816Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:56.689889Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5555:4779], ActorId: [2:5556:4780], Starting query actor #1 [2:5557:4781] 2025-10-24T16:27:56.689897Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5556:4780], ActorId: [2:5557:4781], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2025-10-24T16:27:56.690462Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5538:4766], server id = [2:5542:4770], tablet id = 72075186224037908 2025-10-24T16:27:56.690472Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:56.690639Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5556:4780], ActorId: [2:5557:4781], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZWY2N2UxMzMtMzVkYmYwMC05MGI5ODA1My1hNTJlM2IyYw==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:56.700879Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5566:4790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:56.700969Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:56.700978Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5566:4790], StatRequests.size() = 1 2025-10-24T16:27:56.761246Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5556:4780], ActorId: [2:5557:4781], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWY2N2UxMzMtMzVkYmYwMC05MGI5ODA1My1hNTJlM2IyYw==, TxId: 2025-10-24T16:27:56.761283Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5556:4780], ActorId: [2:5557:4781], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWY2N2UxMzMtMzVkYmYwMC05MGI5ODA1My1hNTJlM2IyYw==, TxId: 2025-10-24T16:27:56.761373Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5555:4779], ActorId: [2:5556:4780], Got response [2:5557:4781] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:56.761480Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5579:4796]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:56.761569Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:56.761685Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:56.761693Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:56.761744Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:56.761755Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-10-24T16:27:56.761763Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:56.763015Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-10-24T16:27:53.660187Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:53.672792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:53.672818Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:53.673265Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:53.673311Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:53.673360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:53.674282Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:53.682808Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:53.682851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:53.682963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:53.682970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:53.682976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:53.683033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:53.683040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:53.683049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 
9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:53.721140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:53.726189Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:53.726263Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:53.726286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:53.726292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:53.726298Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:53.726304Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:53.726361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.726369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.726438Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:53.726464Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:53.726473Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:53.726482Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:53.726489Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:53.726495Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:53.726502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:53.726508Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:53.726513Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:53.726524Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.726528Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.726534Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:53.727072Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:53.727081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:53.727091Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:53.727126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:53.727137Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:53.727147Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:53.727154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:53.727158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:53.727163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:53.727167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:53.727226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:53.727231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:53.727235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:53.727239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:53.727247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:53.727251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:53.727254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:53.727257Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:53.727263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:53.737996Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:53.738021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:53.738028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:53.738039Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:53.738051Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state 
WaitScheme 2025-10-24T16:27:53.738150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.738158Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.738166Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:53.738183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:53.738188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:53.738219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:53.738226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:53.738231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:53.738236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:53.741617Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:53.741649Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:53.741723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.741730Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.741741Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:53.741749Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:53.741756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:53.741766Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:53.741772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... 
16:27:57.068508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2025-10-24T16:27:57.068515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2025-10-24T16:27:57.068525Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:57.068529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-10-24T16:27:57.068536Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:27:57.068545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-10-24T16:27:57.068549Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:57.068583Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:57.068587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-10-24T16:27:57.068595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:27:57.068601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-10-24T16:27:57.068606Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:57.068644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-10-24T16:27:57.068648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.068652Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-10-24T16:27:57.068666Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-10-24T16:27:57.068670Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.068674Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-10-24T16:27:57.068703Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-10-24T16:27:57.068707Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.068711Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-10-24T16:27:57.068725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:240:2232], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-10-24T16:27:57.068728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.068732Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-10-24T16:27:57.068742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [1:240:2232], Recipient [1:460:2402]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-10-24T16:27:57.068748Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:27:57.068752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-10-24T16:27:57.068763Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-10-24T16:27:57.068771Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-10-24T16:27:57.068785Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-10-24T16:27:57.068805Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:57.068810Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:57.068815Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-10-24T16:27:57.068822Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:27:57.068828Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-10-24T16:27:57.068833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-10-24T16:27:57.068840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is Executed 2025-10-24T16:27:57.068845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-10-24T16:27:57.068849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2025-10-24T16:27:57.068854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit 
BlockFailPoint 2025-10-24T16:27:57.068859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is Executed 2025-10-24T16:27:57.068862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2025-10-24T16:27:57.068866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-10-24T16:27:57.068870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-10-24T16:27:57.069014Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:305: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-10-24T16:27:57.069023Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-10-24T16:27:57.069033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-10-24T16:27:57.069038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-10-24T16:27:57.069045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-10-24T16:27:57.069049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-10-24T16:27:57.069112Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-10-24T16:27:57.069117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-10-24T16:27:57.069120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-10-24T16:27:57.069124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-10-24T16:27:57.069130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:152] at 9437186 is Executed 2025-10-24T16:27:57.069134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-10-24T16:27:57.069137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000005:152] at 9437186 has finished 2025-10-24T16:27:57.069142Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:57.069145Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-10-24T16:27:57.069150Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-10-24T16:27:57.069153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-10-24T16:27:57.080374Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2025-10-24T16:27:57.080398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-10-24T16:27:57.080417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:105:2138], exec latency: 1 ms, propose latency: 3 ms 2025-10-24T16:27:57.080431Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-10-24T16:27:57.080437Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-10-24T16:27:57.080508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:240:2232]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-10-24T16:27:57.080515Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.080521Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-10-24T16:27:54.059832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:54.072568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:54.072586Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.072935Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:54.072971Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:54.073008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:54.073716Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:54.081274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:54.081308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:54.081407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:54.081413Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:54.081419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:54.081473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:54.081481Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:54.081489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] 
in generation 2 2025-10-24T16:27:54.117838Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:54.120798Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:54.120847Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:54.120862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:54.120866Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:54.120869Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:54.120872Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:54.120907Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.120911Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.120955Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:54.120969Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:54.120974Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:54.120978Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:54.120982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:54.120986Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:54.120990Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:54.120994Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:54.120997Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:54.121004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:54.121007Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:54.121011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:54.121343Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" 
TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:54.121349Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:54.121355Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:54.121380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:54.121388Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:54.121396Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:54.121400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:54.121403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:54.121406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:54.121412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:54.121454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:54.121456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:54.121459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:54.121461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:54.121467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:54.121469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:54.121472Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:54.121474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:54.121478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:54.132084Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:54.132103Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:54.132120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:54.132129Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:54.132141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:54.132217Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:54.132223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:54.132228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:54.132242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:54.132245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:54.132283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:54.132289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:54.132293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:54.132296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:54.134349Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:54.134368Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:54.134416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.134421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:54.134428Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:54.134434Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:54.134437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:54.134444Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:54.134449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... 
ions 2025-10-24T16:27:57.163185Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:57.163221Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:353:2320], Recipient [2:353:2320]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:57.163226Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:57.163233Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2025-10-24T16:27:57.163238Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:57.163241Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-10-24T16:27:57.163246Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2025-10-24T16:27:57.163250Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-10-24T16:27:57.163255Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163260Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-10-24T16:27:57.163266Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-10-24T16:27:57.163271Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-10-24T16:27:57.163379Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-10-24T16:27:57.163386Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163390Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-10-24T16:27:57.163394Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-10-24T16:27:57.163399Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-10-24T16:27:57.163404Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163408Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-10-24T16:27:57.163412Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-10-24T16:27:57.163417Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-10-24T16:27:57.163425Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2025-10-24T16:27:57.163429Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-10-24T16:27:57.163433Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2025-10-24T16:27:57.163439Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163444Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-10-24T16:27:57.163448Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-10-24T16:27:57.163452Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-10-24T16:27:57.163459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163462Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-10-24T16:27:57.163466Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-10-24T16:27:57.163471Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-10-24T16:27:57.163475Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163479Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-10-24T16:27:57.163483Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-10-24T16:27:57.163488Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-10-24T16:27:57.163495Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163499Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-10-24T16:27:57.163504Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-10-24T16:27:57.163508Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-10-24T16:27:57.163513Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163517Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-10-24T16:27:57.163521Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2025-10-24T16:27:57.163526Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2025-10-24T16:27:57.163530Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163535Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2025-10-24T16:27:57.163539Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-10-24T16:27:57.163543Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-10-24T16:27:57.163601Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:305: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-10-24T16:27:57.163624Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-10-24T16:27:57.163632Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163636Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-10-24T16:27:57.163640Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-10-24T16:27:57.163644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-10-24T16:27:57.163687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-10-24T16:27:57.163691Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-10-24T16:27:57.163695Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-10-24T16:27:57.163699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-10-24T16:27:57.163705Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437185 is Executed 2025-10-24T16:27:57.163709Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-10-24T16:27:57.163714Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000016:45] at 9437185 has finished 2025-10-24T16:27:57.163720Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:57.163724Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-10-24T16:27:57.163729Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-10-24T16:27:57.163733Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-10-24T16:27:57.174813Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-10-24T16:27:57.174835Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-10-24T16:27:57.174849Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-10-24T16:27:57.174856Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-10-24T16:27:57.174874Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:27:57.174883Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-10-24T16:27:57.175188Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-10-24T16:27:57.175206Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-10-24T16:27:57.175219Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:57.175226Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-10-24T16:27:57.175242Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:27:57.175250Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRestartWithStateMigrationRetryAndRestartWithoutStateMigration [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-10-24T16:27:21.116034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.140397Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.140481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.140518Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ab2/r3tmp/tmpdSCXst/pdisk_1.dat 2025-10-24T16:27:21.233507Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.265931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.265962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.298070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12074, node 1 2025-10-24T16:27:21.378235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.378253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.378256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.378379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.380837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.416489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17888 2025-10-24T16:27:21.744582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.554382Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.557583Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.558603Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.575599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.575638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.598907Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.599426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.686151Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.686191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.705916Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.762713Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.786815Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787029Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787212Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787324Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787365Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787383Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787424Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787454Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.787473Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.829022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:22.873950Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.882168Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.882202Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.888173Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.888372Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.888395Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.888402Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded 
ColumnStatistics: column count# 0 2025-10-24T16:27:22.888408Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.888414Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.888420Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:22.888428Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.889164Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.895272Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.895300Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1872:2592], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.897252Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1889:2603] 2025-10-24T16:27:22.897296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1889:2603], schemeshard id = 72075186224037897 2025-10-24T16:27:22.947492Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1923:2614] 2025-10-24T16:27:22.948757Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:22.962231Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.962296Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.962311Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.965178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.967387Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.967429Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.096427Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.159127Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.258029Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.143483Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.188382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3059], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.188417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.188496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2246:3064], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:24.188501Z node 1 ... egator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:48.582010Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 5 2025-10-24T16:27:48.582064Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-10-24T16:27:51.464035Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:27:52.565307Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-10-24T16:27:52.565337Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8321: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-10-24T16:27:52.565343Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8352: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-10-24T16:27:52.565348Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-10-24T16:27:53.818791Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:53.818961Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 6 2025-10-24T16:27:53.819024Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-10-24T16:27:53.850125Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:53.850157Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:53.850246Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-10-24T16:27:53.861745Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:55.003294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:55.003327Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:707: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:55.003333Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:55.003344Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-10-24T16:27:55.003351Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:55.003444Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:3627:3350], ActorId: [2:3628:3351], Starting query actor #1 [2:3629:3352] 2025-10-24T16:27:55.003456Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:3628:3351], ActorId: [2:3629:3352], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:55.006971Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:3628:3351], ActorId: [2:3629:3352], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjU1MDExODYtYmEzMzY1MTgtOWMwMGRlNzMtNTM1ZmY0YmQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:55.014196Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3638:3361]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:55.014250Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-10-24T16:27:55.014262Z node 2 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [2:3640:3363] 2025-10-24T16:27:55.014272Z node 2 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [2:3640:3363] 2025-10-24T16:27:55.014369Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:3641:3364] 2025-10-24T16:27:55.014395Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:3640:3363], server id = [2:3641:3364], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:55.014413Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:3641:3364], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:55.014419Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:55.014444Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:55.014459Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:3638:3361], StatRequests.size() = 1 2025-10-24T16:27:55.014467Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:55.034546Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3628:3351], ActorId: [2:3629:3352], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjU1MDExODYtYmEzMzY1MTgtOWMwMGRlNzMtNTM1ZmY0YmQ=, TxId: 2025-10-24T16:27:55.034575Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3628:3351], ActorId: [2:3629:3352], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjU1MDExODYtYmEzMzY1MTgtOWMwMGRlNzMtNTM1ZmY0YmQ=, TxId: 2025-10-24T16:27:55.034655Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3627:3350], ActorId: [2:3628:3351], Got response [2:3629:3352] SUCCESS 2025-10-24T16:27:55.034749Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:55.046313Z node 2 :STATISTICS DEBUG: 
tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:55.046339Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:55.149562Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:55.149592Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:55.224549Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3640:3363], schemeshard count = 1 2025-10-24T16:27:56.101614Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:56.101650Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-10-24T16:27:56.101656Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:670: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:57.100993Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:27:57.111343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:57.111400Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-10-24T16:27:57.111404Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:57.111494Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:3729:3404], ActorId: [2:3730:3405], Starting query actor #1 [2:3731:3406] 2025-10-24T16:27:57.111509Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:3730:3405], ActorId: [2:3731:3406], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:57.112419Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:3730:3405], ActorId: [2:3731:3406], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODgwMGY3MDUtM2Q0OTJiYzYtYzIyODBlNjMtZmVmOTY3Y2I=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-10-24T16:27:57.115404Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3730:3405], ActorId: [2:3731:3406], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODgwMGY3MDUtM2Q0OTJiYzYtYzIyODBlNjMtZmVmOTY3Y2I=, TxId: 2025-10-24T16:27:57.115421Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3730:3405], ActorId: [2:3731:3406], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODgwMGY3MDUtM2Q0OTJiYzYtYzIyODBlNjMtZmVmOTY3Y2I=, TxId: 2025-10-24T16:27:57.115483Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3729:3404], ActorId: [2:3730:3405], Got response [2:3731:3406] SUCCESS 2025-10-24T16:27:57.115546Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:57.127036Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:57.127058Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2732:3231] 2025-10-24T16:27:57.127180Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3754:3420]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:57.127846Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:57.127871Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:57.127933Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:57.127940Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:57.127946Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:57.128561Z node 2 :STATISTICS ERROR: service_impl.cpp:691: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-10-24T16:27:57.128625Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-10-24T16:27:19.709279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.749441Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: 
[WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.749536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.749575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002b08/r3tmp/tmpwrM02V/pdisk_1.dat 2025-10-24T16:27:19.845750Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:19.878228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:19.878269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:19.911574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9687, node 1 2025-10-24T16:27:20.005748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:20.005768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:20.005771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:20.005907Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.008990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.034931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20165 2025-10-24T16:27:20.383730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.239753Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.242012Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.242792Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.250050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.250076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.272754Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.273194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.355528Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.355560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.367168Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.431221Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.441556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.463323Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463493Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463659Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463764Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463868Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463893Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463915Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463946Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.463989Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.524916Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.535836Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.535887Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.546679Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.546712Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1865:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.546938Z node 2 :STATISTICS DEBUG: 
tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.547010Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.547035Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.547040Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.547046Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.547053Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.547059Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.547067Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.547231Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.548872Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1876:2594] 2025-10-24T16:27:21.548939Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1876:2594], schemeshard id = 72075186224037897 2025-10-24T16:27:21.595275Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1920:2612] 2025-10-24T16:27:21.597134Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.600851Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.600871Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.600882Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.602989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.604708Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.604738Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:21.751127Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.797627Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:21.891602Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:22.771078Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.809691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2225:3056], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.809729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.809815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.809820Z node 1 : ... id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:53.797117Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:53.797156Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:53.797172Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:4932:4478], StatRequests.size() = 1 2025-10-24T16:27:53.797184Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:53.815950Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4922:4468], ActorId: [2:4923:4469], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2E2YzMyMDQtZmI2ZWM5MDAtZjc4OTIwNGEtNzcyYTZiMDE=, TxId: 2025-10-24T16:27:53.815975Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4922:4468], ActorId: [2:4923:4469], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2E2YzMyMDQtZmI2ZWM5MDAtZjc4OTIwNGEtNzcyYTZiMDE=, TxId: 2025-10-24T16:27:53.816057Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4921:4467], ActorId: [2:4922:4468], Got response [2:4923:4469] SUCCESS 2025-10-24T16:27:53.816105Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:53.827565Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:53.827587Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:53.890272Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:53.890298Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:53.931651Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:4934:4480], schemeshard count = 1 2025-10-24T16:27:56.031570Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:56.031594Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-10-24T16:27:56.031601Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:27:56.031605Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:56.032631Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:56.044479Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:56.044625Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:56.044643Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:56.044949Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-10-24T16:27:56.044956Z node 2 :STATISTICS WARN: tx_response_tablet_distribution.cpp:65: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-10-24T16:27:56.044963Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:57.111936Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:57.123703Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:57.123785Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:57.124042Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5081:4549], server id = [2:5085:4553], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:57.124174Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5081:4549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:57.124226Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5082:4550], server id = [2:5086:4554], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:57.124231Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5082:4550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:57.124247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5083:4551], server id = [2:5087:4555], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:57.124251Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5083:4551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:57.124449Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5084:4552], server id = [2:5088:4556], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:57.124456Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5084:4552], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:57.125636Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:57.125765Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5081:4549], server id = [2:5085:4553], tablet id = 72075186224037899 2025-10-24T16:27:57.125771Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:57.125904Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:57.125990Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5082:4550], server id = [2:5086:4554], tablet id = 72075186224037900 2025-10-24T16:27:57.125994Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:57.126092Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:57.126160Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5083:4551], server id = [2:5087:4555], tablet id = 72075186224037901 2025-10-24T16:27:57.126163Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:57.126185Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:57.126189Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:57.126219Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5084:4552], server id = [2:5088:4556], tablet id = 72075186224037902 2025-10-24T16:27:57.126221Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:57.126241Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:57.126261Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:57.126323Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5101:4565], ActorId: [2:5102:4566], Starting query actor #1 [2:5103:4567] 2025-10-24T16:27:57.126331Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5102:4566], ActorId: [2:5103:4567], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:57.126891Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5102:4566], ActorId: [2:5103:4567], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YTExODA1YzktMzU1YjU2NzYtZjAyOGExNDgtMmQxMzNhYTM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:57.133371Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5112:4576]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:57.133459Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:57.133465Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5112:4576], StatRequests.size() = 1 2025-10-24T16:27:57.191682Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5102:4566], ActorId: [2:5103:4567], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTExODA1YzktMzU1YjU2NzYtZjAyOGExNDgtMmQxMzNhYTM=, TxId: 2025-10-24T16:27:57.191708Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5102:4566], ActorId: [2:5103:4567], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTExODA1YzktMzU1YjU2NzYtZjAyOGExNDgtMmQxMzNhYTM=, TxId: 2025-10-24T16:27:57.191769Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:204: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-10-24T16:27:57.191805Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5101:4565], ActorId: [2:5102:4566], Got response [2:5103:4567] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:57.191907Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5125:4582]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:57.191962Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:57.192063Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:57.192070Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:57.192222Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:57.192230Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:57.192236Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:57.193243Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpQuery::UdfTerminate >> KqpStats::DataQueryWithEffects+UseSink >> KqpQuery::PreparedQueryInvalidate >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: 2025-10-24T16:27:54.589269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:54.589481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:54.617688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:54.619822Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:305:2228], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:54.619955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:27:54.619964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:54.620321Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:689:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:54.620367Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:54.620392Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001878/r3tmp/tmpqYCuyt/pdisk_1.dat 2025-10-24T16:27:54.734387Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.769784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.769814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.769943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.769949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.801546Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:54.801713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.801815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.855416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:54.890655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.912926Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:55.152502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.183162Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [2:1281:2376], Recipient [2:1307:2389]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:55.185978Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [2:1281:2376], Recipient [2:1307:2389]: NKikimr::TEvTablet::TEvRestored 
2025-10-24T16:27:55.186106Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1307:2389] 2025-10-24T16:27:55.186156Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:55.187326Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [2:1281:2376], Recipient [2:1307:2389]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:55.198306Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:55.198473Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:55.198675Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:55.198692Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:55.198702Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:55.198792Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:55.198893Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:55.198914Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1331:2389] in generation 1 2025-10-24T16:27:55.199827Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:55.204509Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:55.204579Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:55.204601Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1335:2406] 2025-10-24T16:27:55.204606Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:55.204611Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:55.204618Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:55.204697Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:1307:2389], Recipient [2:1307:2389]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:55.204704Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:55.204790Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:55.204808Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:55.204823Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:55.204833Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:55.204840Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:55.204846Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:55.204850Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:55.204855Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:55.204860Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:27:55.204896Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [2:1304:2387], Recipient [2:1307:2389]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:55.204900Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:55.204905Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1290:2774], serverId# [2:1304:2387], sessionId# [0:0:0] 2025-10-24T16:27:55.204996Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:823:2464], Recipient [2:1304:2387] 2025-10-24T16:27:55.205001Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:55.205014Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:55.205065Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:55.205077Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:55.205097Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-10-24T16:27:55.205104Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:55.205109Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:55.205114Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:55.205118Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:55.205168Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:55.205172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:55.205176Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710657] at 72075186224037888 to e ... 
ecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2503 UnfrozenTablets: 72075186224037888 2025-10-24T16:27:57.728475Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:27:57.728487Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [2503:281474976710669] at 72075186224037888 on unit ExecuteWrite 2025-10-24T16:27:57.728494Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976710669 2025-10-24T16:27:57.728502Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 3 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976710669 2025-10-24T16:27:57.728507Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [2503:281474976710669] at 72075186224037888 on unit CompleteWrite 2025-10-24T16:27:57.728520Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:57.728533Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-10-24T16:27:57.728538Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-10-24T16:27:57.728542Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-10-24T16:27:57.728562Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2503} 2025-10-24T16:27:57.728607Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-10-24T16:27:57.728615Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [2503:281474976710669] at 72075186224037890 on unit ExecuteWrite 2025-10-24T16:27:57.728620Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037890 from 72075186224037890 to 72075186224037888 txId 281474976710669 2025-10-24T16:27:57.728625Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037890 from 72075186224037890 to 72075186224037889 txId 281474976710669 2025-10-24T16:27:57.728629Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [2503:281474976710669] at 72075186224037890 on unit CompleteWrite 2025-10-24T16:27:57.728635Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:27:57.728645Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-10-24T16:27:57.728649Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-10-24T16:27:57.728696Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [2:2077:2500], Recipient [2:2204:2530]: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-10-24T16:27:57.728702Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:27:57.728707Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 
281474976710669 2025-10-24T16:27:57.728716Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-10-24T16:27:57.728751Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [2503 : 281474976710669] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2300:3341] 2025-10-24T16:27:57.728877Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-10-24T16:27:57.728912Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [2:2204:2530], Recipient [2:2077:2500]: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-10-24T16:27:57.728917Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:27:57.728921Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976710669 2025-10-24T16:27:57.728927Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-10-24T16:27:57.728945Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [2503 : 281474976710669] from 72075186224037888 at tablet 72075186224037888 send result to client [1:2298:3341] 2025-10-24T16:27:57.729023Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:57.729214Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:64:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 2503 UnfrozenTablets: 72075186224037890 2025-10-24T16:27:57.729261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [2:2077:2500], Recipient [1:2188:3315] 2025-10-24T16:27:57.729267Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:27:57.729275Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976710669 2025-10-24T16:27:57.729284Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-10-24T16:27:57.729296Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-10-24T16:27:57.729317Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:64:2065] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2503} 2025-10-24T16:27:57.729378Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 
269287425, Sender [2:2204:2530], Recipient [1:2188:3315] 2025-10-24T16:27:57.729383Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:27:57.729389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976710669 2025-10-24T16:27:57.729396Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-10-24T16:27:57.729418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [2503 : 281474976710669] from 72075186224037889 at tablet 72075186224037889 send result to client [1:2299:3341] 2025-10-24T16:27:57.729569Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:27:57.731349Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-10-24T16:27:57.731418Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [2:2204:2530], Recipient [2:2077:2500]: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-10-24T16:27:57.731424Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.731429Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976710669 2025-10-24T16:27:57.731504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [2:2204:2530], Recipient [1:2136:3274] 2025-10-24T16:27:57.731509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.731522Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976710669 2025-10-24T16:27:57.731644Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-10-24T16:27:57.731758Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:2136:3274], Recipient [2:2077:2500] 2025-10-24T16:27:57.731768Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.731776Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976710669 2025-10-24T16:27:57.731785Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [1:2136:3274], Recipient [2:2204:2530] 2025-10-24T16:27:57.731789Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.731794Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 
281474976710669 2025-10-24T16:27:57.731899Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-10-24T16:27:57.731919Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [2:2077:2500], Recipient [2:2204:2530]: {TEvReadSet step# 2503 txid# 281474976710669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-10-24T16:27:57.731924Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.731929Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976710669 2025-10-24T16:27:57.731993Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [2:2077:2500], Recipient [1:2136:3274] 2025-10-24T16:27:57.731998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:27:57.732004Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710669 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-10-24T16:27:23.041988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.085215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:23.085312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:23.085352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a1a/r3tmp/tmpuJjvo1/pdisk_1.dat 2025-10-24T16:27:23.214943Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.248497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.248544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.281889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15856, node 1 2025-10-24T16:27:23.394467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:23.394497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:23.394502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:23.394699Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:23.400210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:23.443533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:63262 2025-10-24T16:27:23.790640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:24.599581Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:24.602226Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:24.603045Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:24.610911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:24.610944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:24.640681Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:24.647004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:24.764645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:24.764685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:24.778891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:24.838610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.860362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.862789Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.862966Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.863095Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.863138Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.863154Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.863195Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.863227Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.863245Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.953698Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:24.961180Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:24.961207Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:24.970415Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:24.970495Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:24.970539Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:24.970546Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:24.970553Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:24.970561Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:24.970567Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:24.970576Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:24.970723Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:24.973908Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:24.973936Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:24.975834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:24.975913Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:25.021568Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:25.023419Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:25.030350Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:25.030373Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:25.030385Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:25.033250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:25.035237Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:25.035273Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:25.050509Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:25.194812Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:25.262055Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:25.371516Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:26.323099Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:26.371800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2229:3061], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.371871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.371983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2247:3066], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.371994Z node 1 ... l.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:53.946619Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4139:3814], ActorId: [2:4140:3815], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Yjg1MzVkMzQtYWU4YTBmMjktYmZlZTQxZGItMzQxOTI2NWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:53.951476Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4149:3824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:53.951532Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:53.951537Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4149:3824], StatRequests.size() = 1 2025-10-24T16:27:53.985043Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4139:3814], ActorId: [2:4140:3815], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yjg1MzVkMzQtYWU4YTBmMjktYmZlZTQxZGItMzQxOTI2NWQ=, TxId: 2025-10-24T16:27:53.985063Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4139:3814], ActorId: [2:4140:3815], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Yjg1MzVkMzQtYWU4YTBmMjktYmZlZTQxZGItMzQxOTI2NWQ=, TxId: 2025-10-24T16:27:53.985132Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4138:3813], ActorId: [2:4139:3814], Got response [2:4140:3815] SUCCESS 2025-10-24T16:27:53.985202Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:53.996495Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:53.996516Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:54.048871Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4167:3832] 2025-10-24T16:27:54.049072Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3110:3333] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-10-24T16:27:54.049079Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:38: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:3110:3333] 2025-10-24T16:27:54.049092Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:100: [72075186224037894] TTxAnalyze::Complete 2025-10-24T16:27:54.361347Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:54.361375Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:54.819087Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:54.819124Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:54.819311Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:54.830750Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:54.830885Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:54.830897Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:51: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-10-24T16:27:54.842328Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:55.706088Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:55.706125Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:707: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:55.706131Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:55.706200Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:27:55.706341Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:55.706370Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:55.720516Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-10-24T16:27:56.507559Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:56.507596Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:56.507704Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 51, entries count: 2, are all stats full: 1 2025-10-24T16:27:56.519074Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:56.568540Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:56.568569Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=operationId 2025-10-24T16:27:56.568573Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:57.469613Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:57.469679Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:57.469686Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:57.469916Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:57.482478Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:57.482601Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:57.482618Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:57.482743Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:57.494490Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:57.494575Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:57.494778Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4305:3903], server id = [2:4306:3904], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:57.494816Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4305:3903], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:57.495218Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:57.495237Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:57.495288Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:57.495319Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:57.495395Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4308:3906], ActorId: [2:4309:3907], Starting query actor #1 [2:4310:3908] 2025-10-24T16:27:57.495408Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4309:3907], ActorId: [2:4310:3908], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:57.496374Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4305:3903], server id = [2:4306:3904], tablet id = 72075186224037899 2025-10-24T16:27:57.496399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:57.496679Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4309:3907], ActorId: [2:4310:3908], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OGRiODI4NzEtOWQyNTEwMmUtYjJkMjgxZmYtYzExOGYzY2Q=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:57.510470Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:212: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:00:35.000000Z, event interval end# 2025-10-24T16:27:55.000000Z 2025-10-24T16:27:57.510520Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4309:3907], ActorId: [2:4310:3908], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGRiODI4NzEtOWQyNTEwMmUtYjJkMjgxZmYtYzExOGYzY2Q=, TxId: 2025-10-24T16:27:57.510528Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4309:3907], ActorId: [2:4310:3908], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGRiODI4NzEtOWQyNTEwMmUtYjJkMjgxZmYtYzExOGYzY2Q=, TxId: 2025-10-24T16:27:57.510599Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4308:3906], ActorId: [2:4309:3907], Got response [2:4310:3908] SUCCESS 2025-10-24T16:27:57.510658Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:57.522600Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:57.522631Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3110:3333] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] >> KqpExplain::CreateTableAs+Stats [GOOD] >> KqpExplain::CreateTableAs-Stats >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar |61.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |61.0%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.0%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-10-24T16:27:19.943249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.978390Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.978482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.978522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ad5/r3tmp/tmpDXJFKz/pdisk_1.dat 2025-10-24T16:27:20.081401Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:20.114143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:20.114188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:20.149402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29350, node 1 2025-10-24T16:27:20.238425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:20.238446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:20.238450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:20.238625Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.244437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.289781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22189 2025-10-24T16:27:20.624592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.534079Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.536444Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.537264Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.545129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.545161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.568217Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.568697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.650463Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.650504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.663035Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.730626Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.752334Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752573Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752740Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752840Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752851Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752867Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752930Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752951Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.752968Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.818005Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.823262Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.823291Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.829338Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.829690Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.829722Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.829728Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.829734Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.829741Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.829747Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.829755Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.830266Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.830488Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:21.830763Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.832747Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.832764Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.832776Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.837622Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:21.838529Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:21.840038Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.840066Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.841624Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:21.841668Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:21.864468Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.922947Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:22.059556Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.061461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.062710Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.062735Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:22.145891Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:22.366573Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:22.870003Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.918734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3062], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.918773Z ... ribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:54.412127Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:54.412195Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:54.412387Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5064:4559], server id = [2:5068:4563], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:54.412599Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5064:4559], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.412678Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5065:4560], server id = [2:5069:4564], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:54.412683Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5065:4560], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.412940Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5066:4561], server id = [2:5070:4565], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:54.412952Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5066:4561], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.413151Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5067:4562], server id = [2:5071:4566], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:54.413165Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5067:4562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:54.414310Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:54.414368Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5064:4559], server id = [2:5068:4563], tablet id = 72075186224037899 2025-10-24T16:27:54.414372Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.414599Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:54.414691Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5065:4560], server id = [2:5069:4564], tablet id = 72075186224037900 2025-10-24T16:27:54.414695Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.414834Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:54.414893Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5066:4561], server id = [2:5070:4565], tablet id = 72075186224037901 2025-10-24T16:27:54.414897Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.414963Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:54.414970Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send 
aggregate statistics response to node: 2 2025-10-24T16:27:54.415012Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:54.415091Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5067:4562], server id = [2:5071:4566], tablet id = 72075186224037902 2025-10-24T16:27:54.415094Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:54.467987Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:54.468054Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:54.906572Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 3 2025-10-24T16:27:54.906608Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:56.520694Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:56.520793Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 1 2025-10-24T16:27:56.520837Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-10-24T16:27:56.595553Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:56.595590Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:56.595702Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 51, entries count: 2, are all stats full: 1 2025-10-24T16:27:56.608402Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:57.680043Z node 2 :STATISTICS INFO: service_impl.cpp:416: Node 3 is unavailable 2025-10-24T16:27:57.680075Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:57.680133Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:57.680154Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:57.680187Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 3 is different from the current 0 2025-10-24T16:27:57.680191Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:57.680374Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:57.692005Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:57.692062Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-10-24T16:27:57.692260Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5182:4620], server id = [2:5183:4621], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:57.692283Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5182:4620], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:57.692587Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:57.692594Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:57.692621Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:57.692666Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:57.692744Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5185:4623], ActorId: [2:5186:4624], Starting query actor #1 [2:5187:4625] 2025-10-24T16:27:57.692754Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5186:4624], ActorId: [2:5187:4625], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:57.693360Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5182:4620], server id = [2:5183:4621], tablet id = 72075186224037900 2025-10-24T16:27:57.693368Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:57.693426Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5186:4624], ActorId: [2:5187:4625], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjAyMmQwYWUtNDkwZDZjYTItMjcwOTNmYTItNzg3OWI1MTc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:57.699365Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5196:4634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:57.699417Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:57.699422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5196:4634], StatRequests.size() = 1 2025-10-24T16:27:57.727707Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5186:4624], ActorId: [2:5187:4625], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjAyMmQwYWUtNDkwZDZjYTItMjcwOTNmYTItNzg3OWI1MTc=, TxId: 2025-10-24T16:27:57.727737Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5186:4624], ActorId: [2:5187:4625], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=NjAyMmQwYWUtNDkwZDZjYTItMjcwOTNmYTItNzg3OWI1MTc=, TxId: 2025-10-24T16:27:57.727827Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:212: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:00:35.000000Z, event interval end# 2025-10-24T16:27:55.000000Z 2025-10-24T16:27:57.727900Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5185:4623], ActorId: [2:5186:4624], Got response [2:5187:4625] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-10-24T16:27:57.728002Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5210:4640]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:57.728081Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:57.728087Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:57.728134Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:57.728365Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:57.728379Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:57.728387Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:57.729682Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 11487, MsgBus: 12263 2025-10-24T16:27:54.517969Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825861622144761:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.518093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020ca/r3tmp/tmpXXbxdG/pdisk_1.dat 2025-10-24T16:27:54.550385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.562014Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825861622144733:2081] 1761323274517332 != 1761323274517335 2025-10-24T16:27:54.565061Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11487, node 1 2025-10-24T16:27:54.570819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.570834Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.570835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.570887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12263 TClient is connected to server localhost:12263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:27:54.625114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.625140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.626171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:54.629502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.820969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.843183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861622145418:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.843202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861622145407:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.843247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.843864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:54.844025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861622145448:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.844047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.844100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861622145451:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.844111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.845232Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825861622145421:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:27:54.910037Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825861622145476:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:54.955607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.046490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715664:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:27:55.059557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:27:55.059614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:27:55.059671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:27:55.059705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:27:55.059735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:27:55.059770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:27:55.059803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:27:55.059825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:27:55.059873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:27:55.059905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:27:55.059932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:27:55.059956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:27:55.059982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825865917113052:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:27:55.065156Z no ... ERROR: schemereq.cpp:590: Actor# [4:7564825874857010627:2365] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:57.464693Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.525956Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.574548Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:27:57.581119Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 
281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:27:57.589276Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 64716, MsgBus: 10203 2025-10-24T16:27:57.763646Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7564825875606365392:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:57.763667Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020ca/r3tmp/tmpUl2xwF/pdisk_1.dat 2025-10-24T16:27:57.766521Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:57.775798Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:57.779398Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:57.779424Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:57.780682Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64716, node 5 2025-10-24T16:27:57.787311Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:57.787325Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:57.787327Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:57.787386Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10203 TClient is connected to server localhost:10203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:57.843838Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:57.848136Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:27:57.861465Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:27:57.989280Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:58.153942Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825879901333362:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.153974Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.153980Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825879901333374:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.154007Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825879901333376:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.154014Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.154650Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:58.156511Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825879901333378:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-10-24T16:27:58.217359Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825879901333429:2364] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:58.222511Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.275202Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.318351Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:27:58.323180Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:27:58.331151Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 16864, MsgBus: 64494 2025-10-24T16:27:54.209380Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825862087942759:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.209422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020d9/r3tmp/tmpnJ35YK/pdisk_1.dat 2025-10-24T16:27:54.234355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-10-24T16:27:54.242371Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825862087942654:2081] 1761323274208824 != 1761323274208827 2025-10-24T16:27:54.244357Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16864, node 1 2025-10-24T16:27:54.249212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.249232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.249234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.249272Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64494 TClient is connected to server localhost:64494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:54.312319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.312342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.313416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.316190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.326604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:54.342063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.358779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.371022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.495873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862087944279:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.495893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.495957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862087944289:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.495964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.534180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.536779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.543656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.551673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.565477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.579784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.593441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.607643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.621938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.640242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862087945174:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.640267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.640367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862087945180:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.640378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.640393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862087945179:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.641302Z node 1 :FLAT_TX_SCHEMESHARD ... nt=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-10-24T16:27:57.921072Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-10-24T16:27:57.921076Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-10-24T16:27:57.921080Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-10-24T16:27:57.921088Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-10-24T16:27:57.921091Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-10-24T16:27:57.921099Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-10-24T16:27:57.921107Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-10-24T16:27:57.921451Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;self_id=[4:7564825876235700665:2475];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976710668;this=56838384024864;method=TTxController::StartProposeOnExecute;tx_info=281474976710668:TX_KIND_SCHEMA;min=1761323277921;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=212:3;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.921470Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;self_id=[4:7564825876235700659:2474];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976710668;this=56838384023984;method=TTxController::StartProposeOnExecute;tx_info=281474976710668:TX_KIND_SCHEMA;min=1761323277921;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=222:3;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.922128Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[4:7564825876235700677:2477];ev=NActors::IEventHandle;tablet_id=72075186224037911;tx_id=281474976710668;this=56838383051792;method=TTxController::StartProposeOnExecute;tx_info=281474976710668:TX_KIND_SCHEMA;min=1761323277922;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=242:3;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.922203Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037910;self_id=[4:7564825876235700667:2476];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976710668;this=56838384023104;method=TTxController::StartProposeOnExecute;tx_info=281474976710668:TX_KIND_SCHEMA;min=1761323277922;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=232:3;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.923437Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.923448Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.923460Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.923462Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.923467Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.923470Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.924831Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.924851Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.924854Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.924933Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.924951Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:57.924954Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710668;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=12;result=not_found; 2025-10-24T16:27:58.071401Z node 4 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037909;self_id=[4:7564825876235700659:2474];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976710670;this=56838494015792;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1761323278071;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=222:4;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.071499Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;self_id=[4:7564825876235700665:2475];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976710670;this=56838494015968;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1761323278071;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=212:4;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.071528Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;self_id=[4:7564825876235700667:2476];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976710670;this=56838494016320;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1761323278071;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=232:4;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.071587Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[4:7564825876235700677:2477];ev=NActors::IEventHandle;tablet_id=72075186224037911;tx_id=281474976710670;this=56838494052224;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1761323278071;max=18446744073709551615;plan=0;src=[4:7564825876235699121:2141];cookie=242:4;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.073290Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;event=move_table_progress;old_path_id=12;new_path_id=13;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.073377Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;event=move_table_progress;old_path_id=12;new_path_id=13;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.073453Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;event=move_table_progress;old_path_id=12;new_path_id=13;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.073539Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;event=move_table_progress;old_path_id=12;new_path_id=13;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-10-24T16:27:58.074603Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037911 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710670 2025-10-24T16:27:58.074605Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 
72075186224037909 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710670 2025-10-24T16:27:58.074610Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037910 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710670 2025-10-24T16:27:58.074615Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037908 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710670 2025-10-24T16:27:58.075567Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:27:58.078211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:27:58.196326Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> KqpQuery::MixedCreateAsSelect [GOOD] >> KqpQuery::ExecuteWriteQuery >> KqpQuery::RandomNumber >> KqpExplain::LimitOffset >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::FullOuterJoin >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn >> KqpStats::RequestUnitForExecute [GOOD] >> KqpQuery::QueryCacheTtl >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::MultiTxStatsFullYql >> KqpExplain::CreateTableAs-Stats [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-10-24T16:27:21.132903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.159503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.161667Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.161769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.161785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002aaa/r3tmp/tmpJx8byt/pdisk_1.dat 2025-10-24T16:27:21.228813Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.261285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.261312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.282333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25063, node 1 2025-10-24T16:27:21.341794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.341816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.341821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.341922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.344984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.369934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61773 2025-10-24T16:27:21.715476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.507097Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.511478Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.514046Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.527390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.527441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.564627Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.566789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.674806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.674860Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.675279Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675423Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675588Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675750Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675770Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675875Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675902Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675934Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.675964Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.690956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.754267Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.765529Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.765557Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.770516Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.770676Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.770693Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.770697Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.770701Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.770706Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.770710Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-10-24T16:27:22.770715Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.770795Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.771489Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1613:2453] 2025-10-24T16:27:22.773334Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:22.775053Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.775068Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.775085Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.777364Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.777381Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1845:2593], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.780257Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1863:2606] 2025-10-24T16:27:22.780316Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1863:2606], schemeshard id = 72075186224037897 2025-10-24T16:27:22.784364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.786239Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.786274Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:22.839749Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.897974Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:22.943584Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.025224Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.145109Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:23.875424Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.934186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2210:3050], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.934239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.934367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3055], DatabaseId: /Root, PoolId: default, Failed to fetch ... node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:58.022033Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:58.022226Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:58.022285Z node 2 :STATISTICS DEBUG: tx_init.cpp:55: [72075186224037894] Loaded database: /Root/Database 2025-10-24T16:27:58.022297Z node 2 :STATISTICS DEBUG: tx_init.cpp:59: [72075186224037894] Loaded traversal start key 2025-10-24T16:27:58.022305Z node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-10-24T16:27:58.022310Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 4 2025-10-24T16:27:58.022315Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1761323277989021 2025-10-24T16:27:58.022320Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-10-24T16:27:58.022324Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-10-24T16:27:58.022332Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-10-24T16:27:58.022336Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:58.022346Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-10-24T16:27:58.022350Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:58.022354Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:58.022361Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:58.022406Z node 2 :STATISTICS DEBUG: tx_init.cpp:303: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:58.022705Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:58.022861Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:58.022875Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:58.022987Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:58.023086Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:58.023099Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:58.023520Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:58.036281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5054:4563] 2025-10-24T16:27:58.036358Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5054:4563], schemeshard id = 72075186224037897 2025-10-24T16:27:58.036389Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5011:4536], server id = [2:5055:4564], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:58.036428Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:5055:4564] 2025-10-24T16:27:58.036447Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:5055:4564], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-10-24T16:27:58.080516Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:58.080600Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:58.080942Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5060:4569], server id = [2:5064:4573], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:58.081130Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5060:4569], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.081218Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5061:4570], server id = [2:5065:4574], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:58.081228Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5061:4570], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.081536Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5062:4571], server id = [2:5066:4575], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:58.081553Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5062:4571], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.081814Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:5063:4572], server id = [2:5067:4576], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:58.081827Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:5063:4572], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.083316Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:58.083400Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5060:4569], server id = [2:5064:4573], tablet id = 72075186224037899 2025-10-24T16:27:58.083406Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.083569Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:58.083648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5061:4570], server id = [2:5065:4574], tablet id = 72075186224037900 2025-10-24T16:27:58.083653Z 
node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.083844Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:58.083929Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:58.083937Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:58.083992Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:58.084022Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:58.084088Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:5080:4585], ActorId: [2:5081:4586], Starting query actor #1 [2:5082:4587] 2025-10-24T16:27:58.084100Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:5081:4586], ActorId: [2:5082:4587], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:58.084781Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5062:4571], server id = [2:5066:4575], tablet id = 72075186224037901 2025-10-24T16:27:58.084791Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.084874Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:5063:4572], server id = [2:5067:4576], tablet id = 72075186224037902 2025-10-24T16:27:58.084883Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.085049Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:5081:4586], ActorId: [2:5082:4587], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZjU4NzAzMjEtYzA3OWZiMDItMWM4N2U1NWItOTc4MmMyN2Q=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:58.092269Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5091:4596]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:58.092326Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:58.092331Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:5091:4596], StatRequests.size() = 1 2025-10-24T16:27:58.167230Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:5081:4586], ActorId: [2:5082:4587], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjU4NzAzMjEtYzA3OWZiMDItMWM4N2U1NWItOTc4MmMyN2Q=, TxId: 2025-10-24T16:27:58.167257Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:5081:4586], ActorId: [2:5082:4587], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjU4NzAzMjEtYzA3OWZiMDItMWM4N2U1NWItOTc4MmMyN2Q=, TxId: 2025-10-24T16:27:58.167348Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:5080:4585], ActorId: [2:5081:4586], Got response [2:5082:4587] 
SUCCESS 2025-10-24T16:27:58.167447Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:58.179527Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:58.179555Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:58.253583Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5116:4604]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:58.253745Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:58.253756Z node 2 :STATISTICS DEBUG: service_impl.cpp:813: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-10-24T16:27:58.253821Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-10-24T16:27:58.253834Z node 2 :STATISTICS DEBUG: service_impl.cpp:715: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-10-24T16:27:58.253844Z node 2 :STATISTICS DEBUG: service_impl.cpp:656: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-10-24T16:27:58.255346Z node 2 :STATISTICS DEBUG: service_impl.cpp:1154: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OlapTemporary >> KqpExplain::UpdateOnSecondary+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryMulti >> KqpQuery::QueryCache ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2025-10-24T16:27:20.920119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:20.956604Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:20.956693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:20.956728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002abd/r3tmp/tmpIoaxLZ/pdisk_1.dat 2025-10-24T16:27:21.063647Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.096363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.096403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.128734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8750, node 1 2025-10-24T16:27:21.211174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:21.211190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:21.211194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:21.211330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:21.213783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:21.259509Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:65383 2025-10-24T16:27:21.573157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:22.365824Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.368721Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:22.369744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:22.384080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.384121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.407708Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:22.408451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.495977Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.496015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.511491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:22.571711Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.598186Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598413Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598590Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598702Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598741Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598758Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598796Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598825Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.598842Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:22.671699Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.679061Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:22.679099Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:22.688760Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:22.688854Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:22.688885Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:22.688890Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:22.688897Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:22.688903Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:22.688909Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:22.688917Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:22.689055Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:22.691294Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.691320Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:22.693294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:22.693340Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:22.739469Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:22.740844Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:22.745536Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:22.745562Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:22.745574Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:22.749017Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:22.751400Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:22.751444Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:22.810906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:22.897997Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:22.965195Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:23.080242Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:23.937800Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.999254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.999303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.999428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2249:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:23.999437Z node 1 : ... regator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:58.754740Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:58.766677Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:58.766884Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:58.766911Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:58.767113Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:58.779038Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:58.779106Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-10-24T16:27:58.779374Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7120:6180], server id = [2:7125:6185], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:58.779410Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7120:6180], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.779724Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7121:6181], server id = [2:7126:6186], tablet id = 72075186224037900, status = OK 2025-10-24T16:27:58.779742Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7121:6181], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.779792Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7122:6182], server id = [2:7127:6187], tablet id = 72075186224037901, status = OK 2025-10-24T16:27:58.779798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7122:6182], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.779928Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7123:6183], server id = [2:7128:6188], tablet id = 72075186224037902, status = OK 2025-10-24T16:27:58.779937Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7123:6183], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.780095Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7124:6184], server id = [2:7129:6189], tablet id = 72075186224037903, status = OK 2025-10-24T16:27:58.780106Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7124:6184], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.780254Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:58.780377Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7120:6180], server id = [2:7125:6185], tablet id = 72075186224037899 2025-10-24T16:27:58.780383Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 
2025-10-24T16:27:58.780536Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:27:58.780709Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7121:6181], server id = [2:7126:6186], tablet id = 72075186224037900 2025-10-24T16:27:58.780716Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.780762Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-10-24T16:27:58.780786Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7133:6193], server id = [2:7137:6197], tablet id = 72075186224037904, status = OK 2025-10-24T16:27:58.780797Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7133:6193], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.780964Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-10-24T16:27:58.781010Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7122:6182], server id = [2:7127:6187], tablet id = 72075186224037901 2025-10-24T16:27:58.781014Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.781073Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-10-24T16:27:58.781184Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7136:6196], server id = [2:7139:6199], tablet id = 72075186224037905, status = OK 2025-10-24T16:27:58.781195Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7136:6196], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.781239Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7123:6183], server id = [2:7128:6188], tablet id = 72075186224037902 2025-10-24T16:27:58.781242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.781372Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7124:6184], server id = [2:7129:6189], tablet id = 72075186224037903 2025-10-24T16:27:58.781378Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.781406Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7138:6198], server id = [2:7143:6203], tablet id = 72075186224037906, status = OK 2025-10-24T16:27:58.781414Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7138:6198], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.781486Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-10-24T16:27:58.781539Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7141:6201], server id = [2:7144:6204], tablet id = 72075186224037907, status = OK 2025-10-24T16:27:58.781548Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7141:6201], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.781724Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:7142:6202], server id = [2:7145:6205], 
tablet id = 72075186224037908, status = OK 2025-10-24T16:27:58.781736Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:7142:6202], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.781798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7133:6193], server id = [2:7137:6197], tablet id = 72075186224037904 2025-10-24T16:27:58.781802Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.781829Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-10-24T16:27:58.781973Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7136:6196], server id = [2:7139:6199], tablet id = 72075186224037905 2025-10-24T16:27:58.781980Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.782019Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-10-24T16:27:58.782073Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-10-24T16:27:58.782139Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7138:6198], server id = [2:7143:6203], tablet id = 72075186224037906 2025-10-24T16:27:58.782155Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.782199Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7141:6201], server id = [2:7144:6204], tablet id = 72075186224037907 2025-10-24T16:27:58.782202Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.782235Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-10-24T16:27:58.782241Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:58.782304Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:58.782337Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:58.782406Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:7150:6210], ActorId: [2:7151:6211], Starting query actor #1 [2:7152:6212] 2025-10-24T16:27:58.782418Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:7151:6211], ActorId: [2:7152:6212], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:58.783042Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:7142:6202], server id = [2:7145:6205], tablet id = 72075186224037908 2025-10-24T16:27:58.783053Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.783285Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:7151:6211], ActorId: [2:7152:6212], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjgxZTEyNzgtNjg0MTc4MjMtMjExYzhhZGMtMzVhZGE1OWM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:58.808790Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:7151:6211], ActorId: [2:7152:6212], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjgxZTEyNzgtNjg0MTc4MjMtMjExYzhhZGMtMzVhZGE1OWM=, TxId: 2025-10-24T16:27:58.808819Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:7151:6211], ActorId: [2:7152:6212], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjgxZTEyNzgtNjg0MTc4MjMtMjExYzhhZGMtMzVhZGE1OWM=, TxId: 2025-10-24T16:27:58.808912Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:7150:6210], ActorId: [2:7151:6211], Got response [2:7152:6212] SUCCESS 2025-10-24T16:27:58.809038Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:58.821335Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:58.821365Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:5788:3808] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 24735, MsgBus: 2374 2025-10-24T16:27:54.487553Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825863043174972:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.487732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020b5/r3tmp/tmpNF4xH5/pdisk_1.dat 2025-10-24T16:27:54.532572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.533152Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.533347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825863043174943:2081] 1761323274487035 != 1761323274487038 TServer::EnableGrpc on GrpcPort 24735, node 1 2025-10-24T16:27:54.540253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.540262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.540263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.540294Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2374 TClient is connected to server localhost:2374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:27:54.593768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.593798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.594918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:54.603903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.610253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.630504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.655866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.667906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.793167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.831630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863043176591:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.831649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.831702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863043176601:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.831705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.870940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.878271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.887249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.901319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.908345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.922836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.936958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.951606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.964598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825863043177463:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.964622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.964646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863043177468:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.964663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863043177470:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.964679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.965459Z node 1 :FLAT_TX_SCHEMESHARD WA ... :27:57.940407Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:57.940432Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:57.941422Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5514, node 4 2025-10-24T16:27:57.947070Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:57.947084Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:57.947086Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:57.947134Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29933 TClient is connected to server localhost:29933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:57.996208Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:58.005526Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.016093Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:58.018951Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:58.038082Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.051381Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.314926Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825878485072650:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.314949Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.315014Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825878485072659:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.315021Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.327349Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.338217Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.346217Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.360139Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.374191Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.388244Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.402141Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.416155Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.433594Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825878485073522:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.433638Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.433675Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825878485073527:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.433700Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825878485073529:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.433713Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.434830Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:58.442788Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825878485073531:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:27:58.508946Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825878485073583:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } Consumed units: 29 Consumed units: 6 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] Test command err: ydb/tests/library/sqs/requests_client.py:140: DeprecationWarning: The 'warn' method is deprecated, use 'warning' instead logger.warn("Last request failed with code {}, reason '{}' and text '{}'".format( ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: Trying to start YDB, gRPC: 23060, MsgBus: 20149 2025-10-24T16:27:54.273380Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825861480272428:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.273420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020d1/r3tmp/tmpUyE2CA/pdisk_1.dat 2025-10-24T16:27:54.302510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.310488Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825861480272325:2081] 1761323274272758 != 1761323274272761 2025-10-24T16:27:54.313268Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23060, node 1 2025-10-24T16:27:54.318029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.318041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.318043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.318084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20149 TClient is connected to server localhost:20149 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:54.380528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.380576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.381119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:54.381598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-10-24T16:27:54.392035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.406327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.427398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.439675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:54.538369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.573389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861480273972:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.573414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.573478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861480273982:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.573487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.632808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.640706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.649019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.655982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.663278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.677528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.691809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.705903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.721528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825861480274844:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.721556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.721563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861480274849:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.721586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825861480274851:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.721593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.722230Z node 1 :FLAT_TX_SCHEMESHARD ... e":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 13464, MsgBus: 27822 2025-10-24T16:27:58.451137Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7564825879032864338:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:58.451168Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020d1/r3tmp/tmpTFuTIa/pdisk_1.dat 2025-10-24T16:27:58.453634Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:58.467097Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:58.467610Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:7564825879032864221:2081] 1761323278450507 != 1761323278450510 2025-10-24T16:27:58.473720Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:58.473755Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:58.475068Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13464, node 5 2025-10-24T16:27:58.481610Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:58.481629Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:58.481632Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:58.481697Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27822 TClient is connected to server localhost:27822 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:58.533312Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:58.614526Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:58.858935Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825879032864873:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.858951Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825879032864897:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.858957Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.859042Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825879032864903:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.859049Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.859727Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:58.861914Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825879032864902:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:27:58.922319Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825879032864956:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:58.927656Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) PLAN::{"Plan":{"Plans":[{"Tables":["Destination"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> KqpLimits::StreamWrite+Allowed >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting >> KqpExplain::SortStage >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] >> KqpExplain::UpdateSecondaryConditional-UseSink |61.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |61.0%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 10563, MsgBus: 12465 2025-10-24T16:27:55.127079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825866115938602:2070];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:55.127110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020a7/r3tmp/tmpyGuRD1/pdisk_1.dat 2025-10-24T16:27:55.159801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:55.167074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie 
mismatch for subscription [1:7564825866115938571:2081] 1761323275126808 != 1761323275126811 2025-10-24T16:27:55.170499Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10563, node 1 2025-10-24T16:27:55.179273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:55.179283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:55.179285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:55.179322Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12465 TClient is connected to server localhost:12465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:27:55.230652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:55.230679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:55.231772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:55.237605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:55.240019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:27:55.249221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:55.270590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.330785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.342722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.345430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:55.467805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825866115940233:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.467830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.467894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825866115940243:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.467905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.511979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.519160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.531836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.545302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.552227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.566892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.580528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.594759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.610972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825866115941105:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.611003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825866115941110:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.611003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.611029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825866115941112:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.611036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... necting 2025-10-24T16:27:58.552880Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:58.553113Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:7564825878999751216:2081] 1761323278538456 != 1761323278538459 2025-10-24T16:27:58.555338Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9634, node 4 2025-10-24T16:27:58.561925Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:58.561939Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:58.561942Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:58.561988Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24532 TClient is connected to server localhost:24532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:58.640596Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:58.649843Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:58.659547Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.680551Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.693937Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:58.700993Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.897279Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825878999752855:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.897310Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.897365Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825878999752864:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.897372Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.910633Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.919751Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.934019Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.947776Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.956247Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.968439Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.982971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.997080Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.013814Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825883294721022:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.013839Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825883294721027:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.013842Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.013883Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825883294721030:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.013890Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.014519Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:59.023738Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825883294721029:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:27:59.113021Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825883294721083:3549] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected [GOOD] >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> KqpExplain::FullOuterJoin [GOOD] >> KqpParams::RowsList >> KqpQuery::OlapTemporary [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-10-24T16:27:19.537541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.571425Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.571546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.571600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002b3c/r3tmp/tmpNeukJd/pdisk_1.dat 2025-10-24T16:27:19.664566Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:19.697855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:19.697900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:19.731102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2092, node 1 2025-10-24T16:27:19.820371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:19.820400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:19.820406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:19.820586Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:19.825580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:19.862683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:65102 2025-10-24T16:27:20.203838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.080645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.090143Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.090849Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:21.103316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.103355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.126604Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.127136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.210519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.210573Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.222520Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.285773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.307238Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307620Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307733Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307750Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307773Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307870Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307902Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.307927Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.368812Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.373547Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.373573Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.378894Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.379255Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.379283Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.379289Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.379296Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.379302Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.379308Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.379315Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.379591Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.379787Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:21.380159Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.382217Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.382233Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.382245Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.386928Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:21.387708Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:21.389221Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.389255Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.391063Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:21.391127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:21.460764Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.473527Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.576818Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.578371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.579473Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.579500Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:21.683864Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:21.891640Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:22.390817Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.439098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2224:3058], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.439133Z n ... node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:3982:3724], StatRequests.size() = 1 2025-10-24T16:27:52.900434Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:52.917333Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3972:3714], ActorId: [2:3973:3715], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDdmYjMzNzktMzk0MDgxNi1kMmViYjViMS03M2FlMjhh, TxId: 2025-10-24T16:27:52.917358Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3972:3714], ActorId: [2:3973:3715], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDdmYjMzNzktMzk0MDgxNi1kMmViYjViMS03M2FlMjhh, TxId: 2025-10-24T16:27:52.917445Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3971:3713], ActorId: [2:3972:3714], Got response [2:3973:3715] SUCCESS 2025-10-24T16:27:52.917496Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:52.929134Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:52.929162Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:53.033628Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:53.033657Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:53.096237Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3984:3726], schemeshard count = 1 2025-10-24T16:27:53.975299Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:53.975342Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:53.976187Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:53.998604Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:53.998740Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:53.998748Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:51: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-10-24T16:27:54.030620Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:54.030649Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR cookie 0 ... waiting for TEvAnalyzeTableResponse (done) ... 
unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-10-24T16:27:54.030924Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:54.030950Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:54.031076Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3106:3330] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-10-24T16:27:54.031082Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:38: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:3106:3330] 2025-10-24T16:27:54.042272Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-10-24T16:27:54.042301Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:100: [72075186224037894] TTxAnalyze::Complete 2025-10-24T16:27:55.029247Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:55.029308Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:55.029314Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:55.029470Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:55.041137Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:55.041297Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:55.041316Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:55.041600Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:55.053414Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:55.053494Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:27:55.053674Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4110:3793], server id = [2:4111:3794], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:55.053703Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4110:3793], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:55.054564Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:55.054584Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:55.054647Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:55.054675Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:55.054718Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4110:3793], server id = [2:4111:3794], tablet id = 72075186224037899 2025-10-24T16:27:55.054723Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:55.054781Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4115:3797], ActorId: [2:4116:3798], Starting query actor #1 [2:4117:3799] 2025-10-24T16:27:55.054792Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4116:3798], ActorId: [2:4117:3799], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:55.055544Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4116:3798], ActorId: [2:4117:3799], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTdmOWExMjMtM2IzNzNhOGUtNGUyYTc5ZTUtMjM0NDAwZTg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:55.062099Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4126:3808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:55.062191Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:55.062202Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4126:3808], StatRequests.size() = 1 2025-10-24T16:27:55.098475Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4116:3798], ActorId: [2:4117:3799], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTdmOWExMjMtM2IzNzNhOGUtNGUyYTc5ZTUtMjM0NDAwZTg=, TxId: 2025-10-24T16:27:55.098501Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4116:3798], ActorId: [2:4117:3799], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTdmOWExMjMtM2IzNzNhOGUtNGUyYTc5ZTUtMjM0NDAwZTg=, TxId: 2025-10-24T16:27:55.098566Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4115:3797], ActorId: [2:4116:3798], Got response [2:4117:3799] SUCCESS 2025-10-24T16:27:55.098634Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:55.110411Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:55.110439Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3106:3330] 2025-10-24T16:27:55.595484Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:55.595524Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:57.017155Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:57.017240Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 1 2025-10-24T16:27:57.017287Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-10-24T16:27:57.038211Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:57.090625Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:57.090666Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:57.090768Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 51, entries count: 2, are all stats full: 1 2025-10-24T16:27:57.102379Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:58.580329Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:58.580373Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-10-24T16:27:59.455054Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:27:59.492238Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:59.492282Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: Skip TEvStatisticsRequestTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 5862, MsgBus: 24623 2025-10-24T16:27:55.094367Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825864471914371:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:55.094583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020ac/r3tmp/tmpA6dnND/pdisk_1.dat 2025-10-24T16:27:55.139128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:55.139928Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:55.140142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825864471914344:2081] 1761323275094061 != 1761323275094064 TServer::EnableGrpc on GrpcPort 5862, node 1 2025-10-24T16:27:55.149731Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:55.149740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:55.149741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:55.149772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24623 TClient is connected to server localhost:24623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:55.201061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:55.201089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:55.202276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:55.203291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:55.208622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.224659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.244568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.260954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.436235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:55.493369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864471915993:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.493391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.493434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864471916002:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.493441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.535599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.542126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.551997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.558632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.566165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.580240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.587123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.601688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.617736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825864471916865:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.617763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.617815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864471916870:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.617826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864471916871:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.617834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.618418Z node 1 :FLAT_TX_SCHEMESHARD W ... } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503446Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037955;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503549Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503569Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037957;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503716Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503717Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037959;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503899Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037961;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.503972Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: 
false } ; 2025-10-24T16:27:59.504084Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037963;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504145Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504252Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504301Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504403Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504454Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504532Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504601Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504675Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504776Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504830Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504937Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.504947Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.505080Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.505186Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 
Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.505271Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:27:59.505356Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheInvalidate >> KqpStats::DataQueryMulti [GOOD] >> KqpStats::CreateTableAsStats+IsOlap ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 2025-10-24T16:27:21.797849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.820589Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:21.820653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:21.820678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a8d/r3tmp/tmpiuWNLT/pdisk_1.dat 2025-10-24T16:27:21.916113Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.948551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.948592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.981155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6823, node 1 2025-10-24T16:27:22.066791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:22.066811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:22.066817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:22.066994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:22.070073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:22.105753Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20418 2025-10-24T16:27:22.451516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.344091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.350750Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.351541Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.370838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.370878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.395427Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.397098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.489243Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.489288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.505629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.562878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.584793Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585011Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585211Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585341Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585403Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585447Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585480Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.585498Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.653936Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.660471Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.660506Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.670540Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.670641Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.670676Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.670683Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.670690Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.670697Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.670703Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.670714Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.670877Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.673347Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.673381Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1852:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.675711Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1875:2601] 2025-10-24T16:27:23.675764Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1875:2601], schemeshard id = 72075186224037897 2025-10-24T16:27:23.725576Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2609] 2025-10-24T16:27:23.727810Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.735249Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.735289Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.735317Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.740608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:23.743930Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:23.743977Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:23.815548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.900810Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:23.980008Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:24.112350Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:24.992086Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.165075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2231:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.165121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.165232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2249:3068], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.165242Z node 1 : ... tryActor] OwnerId: [2:4133:3805], ActorId: [2:4134:3806], Starting query actor #1 [2:4135:3807] 2025-10-24T16:27:55.830298Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4134:3806], ActorId: [2:4135:3807], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:55.830897Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4128:3801], server id = [2:4129:3802], tablet id = 72075186224037899 2025-10-24T16:27:55.830908Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:55.831039Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4134:3806], ActorId: [2:4135:3807], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTg3N2Y5YjQtNGMxZjc1MDEtOWVjMzc3MGYtNDY4Y2U1NWY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:55.838114Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4144:3816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:55.838204Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:55.838212Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4144:3816], StatRequests.size() = 1 2025-10-24T16:27:55.867830Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4134:3806], ActorId: [2:4135:3807], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTg3N2Y5YjQtNGMxZjc1MDEtOWVjMzc3MGYtNDY4Y2U1NWY=, TxId: 2025-10-24T16:27:55.867901Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4134:3806], ActorId: [2:4135:3807], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTg3N2Y5YjQtNGMxZjc1MDEtOWVjMzc3MGYtNDY4Y2U1NWY=, TxId: 2025-10-24T16:27:55.867976Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4133:3805], ActorId: [2:4134:3806], Got response [2:4135:3807] SUCCESS 2025-10-24T16:27:55.868055Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:55.890238Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:55.890267Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:56.285176Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:27:56.285209Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:56.768207Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:56.768245Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:56.768429Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:56.781565Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:56.781706Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:56.781717Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:51: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-10-24T16:27:56.803557Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:57.741221Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:57.741255Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:707: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:57.741259Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:57.741324Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:27:57.741570Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:27:57.741596Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:27:57.753388Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-10-24T16:27:58.753421Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:58.753454Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:58.753576Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 51, entries count: 2, are all stats full: 1 2025-10-24T16:27:58.765217Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:27:58.817857Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:58.817897Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:58.817903Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-10-24T16:27:59.827959Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:59.828060Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 1 2025-10-24T16:27:59.828089Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-10-24T16:27:59.849583Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:59.849651Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:59.849659Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:59.849908Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:59.861750Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:59.861894Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:59.861918Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:59.862080Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:59.884050Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:59.884140Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:59.884346Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4306:3896], server id = [2:4307:3897], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:59.884377Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4306:3896], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:59.884744Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:59.884763Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:59.884798Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:59.884828Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:59.884922Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4309:3899], ActorId: [2:4310:3900], Starting query actor #1 [2:4311:3901] 2025-10-24T16:27:59.884937Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4310:3900], ActorId: [2:4311:3901], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:59.885567Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4306:3896], server id = [2:4307:3897], tablet id = 72075186224037899 2025-10-24T16:27:59.885579Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:59.885743Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4310:3900], ActorId: [2:4311:3901], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NThjOTM5Ni02ZWM4ZjA5MS03NWJiMjMyMS02ZjAzM2VjNg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:59.910206Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4310:3900], ActorId: [2:4311:3901], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NThjOTM5Ni02ZWM4ZjA5MS03NWJiMjMyMS02ZjAzM2VjNg==, TxId: 2025-10-24T16:27:59.910235Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4310:3900], ActorId: [2:4311:3901], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NThjOTM5Ni02ZWM4ZjA5MS03NWJiMjMyMS02ZjAzM2VjNg==, TxId: 2025-10-24T16:27:59.910336Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4309:3899], ActorId: [2:4310:3900], Got response [2:4311:3901] SUCCESS 2025-10-24T16:27:59.910509Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:59.922261Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:59.922292Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3108:3334] |61.1%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpExplain::UpdateOnSecondary-UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 26898, MsgBus: 15664 2025-10-24T16:27:54.524310Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825862864450890:2146];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.524354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020b2/r3tmp/tmpdTPdSM/pdisk_1.dat 2025-10-24T16:27:54.559457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.569468Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.571062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825862864450773:2081] 1761323274523650 != 1761323274523653 TServer::EnableGrpc on GrpcPort 26898, node 1 2025-10-24T16:27:54.578142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.578152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.578154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.578198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15664 TClient is connected to server localhost:15664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:54.630505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.630531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.631582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.632819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.637445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:27:54.656261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.675137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.686334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.718699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.855645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862864452419:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.855668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.855715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862864452429:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.855723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.893579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.901153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.907706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.915106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.921801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.936777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.950398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.959684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.973830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825862864453292:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.973870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.973884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862864453297:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.973903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825862864453299:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.973914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.974596Z node 1 :FLAT_TX_SCHEMESHARD ... { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.175969Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.177925Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:27:59.189011Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.209258Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.257073Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.275185Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.359509Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:59.539407Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825884501540541:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.539432Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.539555Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825884501540550:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.539573Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.553378Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.564667Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.578930Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.593410Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.609289Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.622746Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.634378Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.649427Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.672805Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825884501541411:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.672834Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.672893Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825884501541416:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.672901Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825884501541417:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.672922Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.673883Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:59.677090Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825884501541420:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:27:59.747544Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825884501541472:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } waiting... 2025-10-24T16:27:59.976816Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.032605Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.047012Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.095504Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpStats::JoinNoStatsYql >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal >> KqpStats::JoinNoStatsScan >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2025-10-24T16:27:21.990813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.013187Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:22.013260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:22.013286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a7f/r3tmp/tmpSiqdvN/pdisk_1.dat 2025-10-24T16:27:22.110800Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.143187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.143234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.176083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6716, node 1 2025-10-24T16:27:22.263555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:22.263582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:22.263588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:22.263779Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:22.267520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:22.296468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21955 2025-10-24T16:27:22.659724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.525787Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.528994Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.529976Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.543472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.543516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.567045Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.567575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.653776Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.653817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.669729Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:23.738625Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:23.760644Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.760935Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761135Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761265Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761284Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761307Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761361Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.761406Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:23.844099Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.849924Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:23.849962Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:23.856241Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:23.856569Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:23.856597Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:23.856602Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:23.856609Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:23.856615Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:23.856620Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:23.856628Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:23.856893Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:23.857098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:23.857404Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:23.859217Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:23.859232Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:23.859244Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:23.863668Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:23.864579Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:23.866005Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.866046Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:23.867731Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:23.867805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:23.943416Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:23.964873Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:24.099814Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:24.101477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:24.105054Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:24.105081Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:24.210538Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:24.459930Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:25.100160Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.201002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3062], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.201043Z n ... TATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:58.127425Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:27:58.139745Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4154:3818] 2025-10-24T16:27:58.139832Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4154:3818], schemeshard id = 72075186224037897 2025-10-24T16:27:58.139882Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4111:3791], server id = [2:4155:3819], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:58.139890Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4155:3819] 2025-10-24T16:27:58.139914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:4155:3819], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-10-24T16:27:58.193694Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:58.193768Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:27:58.193927Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4160:3824], server id = [2:4161:3825], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:58.193973Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4160:3824], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:58.195218Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:58.195238Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:58.195283Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:58.195307Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:58.195363Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4165:3828], ActorId: [2:4166:3829], Starting query actor #1 [2:4167:3830] 2025-10-24T16:27:58.195371Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4166:3829], ActorId: [2:4167:3830], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:58.196003Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4160:3824], server id = [2:4161:3825], tablet id = 72075186224037899 2025-10-24T16:27:58.196013Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:58.196106Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4166:3829], ActorId: [2:4167:3830], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OGI5ODFlZDgtYjRjODg4MDktNWU1ZjU4MDUtYmViOTNmMjM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:27:58.201929Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4176:3839]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:27:58.202006Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:27:58.202013Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4176:3839], StatRequests.size() = 1 2025-10-24T16:27:58.229836Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4166:3829], ActorId: [2:4167:3830], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGI5ODFlZDgtYjRjODg4MDktNWU1ZjU4MDUtYmViOTNmMjM=, TxId: 2025-10-24T16:27:58.229866Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4166:3829], ActorId: [2:4167:3830], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGI5ODFlZDgtYjRjODg4MDktNWU1ZjU4MDUtYmViOTNmMjM=, TxId: 2025-10-24T16:27:58.229965Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4165:3828], ActorId: [2:4166:3829], Got response [2:4167:3830] SUCCESS 2025-10-24T16:27:58.230063Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:58.252310Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:58.252355Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:58.337181Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:4201:3847] 2025-10-24T16:27:58.337436Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:22: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3109:3334] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-10-24T16:27:58.337448Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:38: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:3109:3334] 2025-10-24T16:27:58.337464Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:100: [72075186224037894] TTxAnalyze::Complete 2025-10-24T16:27:58.630459Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 3 is different from the current 0 2025-10-24T16:27:58.630495Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:27:59.115587Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:59.115630Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:59.115634Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:27:59.970787Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:59.970857Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:59.970865Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:59.971166Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:59.982947Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:59.983126Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:59.983148Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:59.983315Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:59.995296Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:27:59.995415Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-10-24T16:27:59.995716Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4265:3882], server id = [2:4266:3883], tablet id = 72075186224037899, status = OK 2025-10-24T16:27:59.995776Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4265:3882], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:27:59.996299Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:27:59.996328Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:27:59.996363Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:27:59.996395Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:27:59.996494Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4268:3885], ActorId: [2:4269:3886], Starting query actor #1 [2:4270:3887] 2025-10-24T16:27:59.996511Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4269:3886], ActorId: [2:4270:3887], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:27:59.997411Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4265:3882], server id = [2:4266:3883], tablet id = 72075186224037899 2025-10-24T16:27:59.997424Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:27:59.997628Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4269:3886], ActorId: [2:4270:3887], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NmI2ZjhhZDMtOWNjNGUxM2EtMzI2OGZiMi05NmQ4NTkyNQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:00.012153Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4269:3886], ActorId: [2:4270:3887], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmI2ZjhhZDMtOWNjNGUxM2EtMzI2OGZiMi05NmQ4NTkyNQ==, TxId: 2025-10-24T16:28:00.012183Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4269:3886], ActorId: [2:4270:3887], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmI2ZjhhZDMtOWNjNGUxM2EtMzI2OGZiMi05NmQ4NTkyNQ==, TxId: 2025-10-24T16:28:00.012283Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4268:3885], ActorId: [2:4269:3886], Got response [2:4270:3887] SUCCESS 2025-10-24T16:28:00.012373Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:00.034618Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for 
path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:28:00.034648Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3109:3334] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSorting [GOOD] Test command err: Trying to start YDB, gRPC: 5189, MsgBus: 4010 2025-10-24T16:27:53.972431Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825855638892348:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:53.972601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020e1/r3tmp/tmpSZ2Us0/pdisk_1.dat 2025-10-24T16:27:54.003770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.004011Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825855638892319:2081] 1761323273971937 != 1761323273971940 2025-10-24T16:27:54.004950Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5189, node 1 2025-10-24T16:27:54.010704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.010713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.010714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.010742Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4010 TClient is connected to server localhost:4010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:54.078380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.078401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.079448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.080370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.086094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.100312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.114971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.124402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.248030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825859933861240:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.248048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.248099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825859933861250:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.248102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.265916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.290427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.298566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.306201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.320366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.334364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.348960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.362255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.376304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.392832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825859933862134:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.392858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.392862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825859933862139:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.392883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825859933862141:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.392887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.393503Z node 1 :FLAT_TX_SCHEMESHARD WARN ... default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.200161Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:00.202879Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825889414881482:2625], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:28:00.272847Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825889414881536:5094] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } query_phases { duration_us: 12535 table_access { name: "/Root/ManyShardsTable" reads { rows: 1100 bytes: 8800 } partitions_count: 100 } cpu_time_us: 11909 affected_shards: 100 } compilation { duration_us: 18494 cpu_time_us: 17661 } process_cpu_time_us: 175 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"ManyShardsTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ReadBytes\":{\"Count\":4,\"Sum\":8800,\"Max\":2208,\"Min\":2192}}],\"OutputRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"FinishedTasks\":4,\"Introspections\":[\"4 tasks from DSScanMinimalThreads setting\"],\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"DurationUs\":{\"Count\":4,\"Sum\":36000,\"Max\":10000,\"Min\":8000},\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576,\"History\":[0,4194304,10,4194304]},\"BaseTimeMs\":1761323280294,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":34,\"Max\":9,\"Min\":8},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004,\"History\":[8,2004,9,4018,10,8168]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":33000,\"Max\":9000,\"Min\":8000}},\"Name\":\"4\",\"Push\":{\"LastMessageMs\":{\"Count\":4,\"Sum\":34,\"Max\":9,\"Min\":8},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"Chunks\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":34,\"Max\":9,\"Min\":8},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":33000,\"Max\":9000,\"Min\":8000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":34491,\"Max\":9183,\"Min\":7954,\"History\":[8,7954,9,16484,10,34491]},\"WaitPeriods\":{\"Count\":4,\"Sum\":4,\"Max\":1,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":4,\"Sum\":1785,\"Max\":531,\"Min\":352,\"History\":[0,366,8,744,9,1087,10,1785]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":34,\"Max\":9,\"Min\":8},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[8,8768,9,17600,10,35200]},\"Acti
veTimeUs\":{\"Count\":4,\"Sum\":33000,\"Max\":9000,\"Min\":8000}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":4,\"Sum\":34,\"Max\":9,\"Min\":8},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":34,\"Max\":9,\"Min\":8},\"FirstMessageMs\":{\"Count\":4,\"Sum\":1,\"Max\":1,\"Min\":0},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[8,8768,9,17600,10,35200]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":33000,\"Max\":9000,\"Min\":8000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":35090,\"Max\":9189,\"Min\":7962,\"History\":[8,7962,9,16715,10,35090]},\"WaitPeriods\":{\"Count\":4,\"Sum\":4,\"Max\":1,\"Min\":1}}}],\"StageDurationUs\":10000,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":29874,\"Max\":7940,\"Min\":6741,\"History\":[8,6741,9,14248,10,29874]},\"OutputBytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"UpdateTimeMs\":10,\"Tasks\":4}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"Introspections\":[\"1 minimum tasks for compute\"],\"DurationUs\":{\"Count\":1,\"Sum\":10000,\"Max\":10000,\"Min\":10000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[0,1048576,10,1048576]},\"BaseTimeMs\":1761323280294,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":10,\"Max\":10,\"Min\":10},\"ActiveMessageMs\":{\"Count\":1,\"Max\":10,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":7690,\"Max\":7690,\"Min\":7690,\"History\":[10,7690]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000}},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":10,\"Max\":10,\"Min\":10},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":9,\"Max\":9,\"Min\":9},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveMessageMs\":{\"Count\":1,\"Max\":10,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":8628,\"Max\":8628,\"Min\":8628,\"History\":[10,8628]},\"WaitPeriods\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":1535,\"Max\":1535,\"Min\":1535,\"History\":[0,95,10,1535]},\"StageDurationUs\":10000,\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResultBytes\":{\"Count\":1,\"Sum\":7690,\"Max\":7690,\"Min\":7690},\"OutputBytes\":{\"Count\":1,\"Sum\":7690,\"Max\":7690,\"Min\":7690},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":27,\"Max\":27,\"Min\":27},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":9,\"Max\":9,\"Min\":9},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[10,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":9,\"Max\":9,\"Min\":9},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":
1100,\"Min\":1100},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[10,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":9000,\"Max\":9000,\"Min\":9000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":2114,\"Max\":2114,\"Min\":2114,\"History\":[10,2114]},\"WaitPeriods\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4}}}],\"UpdateTimeMs\":10,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":18494,\"CpuTimeUs\":17661},\"ProcessCpuTimeUs\":175,\"TotalDurationUs\":106565,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":74326},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"2e01afcc-89d20ee4-d63090d8-38db3efa\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"76cdeebb-2c7c65ed-10e0205-a6d0760a\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 106565 total_cpu_time_us: 29745 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1761323280\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"2237fe74-74ea8dc6-2e5ff9a5-e16a890b\",\"version\":\"1.0\"}" >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> KqpLimits::BigParameter >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection >> KqpParams::RowsList [GOOD] >> KqpParams::MissingParameter >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpTypes::UnsafeTimestampCastV0 >> KqpExplain::UpdateSecondaryConditional-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> KqpLimits::TooBigQuery+useSink >> TKesusTest::TestAcquireWaiterDowngrade >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> TKesusTest::TestAcquireSemaphoreTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 29876, MsgBus: 8530 2025-10-24T16:27:55.120374Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825864116525366:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:55.120388Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020a9/r3tmp/tmpKlRnmj/pdisk_1.dat 2025-10-24T16:27:55.151570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:55.161792Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825864116525328:2081] 1761323275120027 != 1761323275120030 2025-10-24T16:27:55.164868Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29876, node 1 2025-10-24T16:27:55.170350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:55.170362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:55.170364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:55.170413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8530 TClient is connected to server localhost:8530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:55.227246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:55.227276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:55.228320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:55.229392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:27:55.238137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.259611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.292932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.303430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.365577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:55.520525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864116526977:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.520554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.520616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864116526987:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.520620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.558369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.564720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.572891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.580048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.587114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.594805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.608595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.622436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.639264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825864116527848:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.639282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.639313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864116527856:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.639312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825864116527854:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.639319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.640163Z node 1 :FLAT_TX_SCHEMESHARD WA ... lterResource ok# false data# peer# 2025-10-24T16:28:01.414909Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b39c080] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2025-10-24T16:28:01.414947Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb67d00] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2025-10-24T16:28:01.414952Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b385480] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2025-10-24T16:28:01.414986Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae1500] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2025-10-24T16:28:01.414995Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae2f00] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2025-10-24T16:28:01.415030Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3a7d00] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2025-10-24T16:28:01.415040Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fada680] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2025-10-24T16:28:01.415074Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb6a400] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2025-10-24T16:28:01.415086Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb5da80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2025-10-24T16:28:01.415103Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb40d00] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2025-10-24T16:28:01.415136Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae5c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2025-10-24T16:28:01.415140Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fac7500] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2025-10-24T16:28:01.415174Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb60180] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2025-10-24T16:28:01.415189Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3facaf80] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2025-10-24T16:28:01.415207Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb45480] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2025-10-24T16:28:01.415229Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb5f480] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2025-10-24T16:28:01.415241Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3b8780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2025-10-24T16:28:01.415274Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3a7000] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2025-10-24T16:28:01.415275Z node 5 :GRPC_SERVER DEBUG: 
logger.cpp:36: [0x704d3faf4000] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2025-10-24T16:28:01.415308Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb48f00] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2025-10-24T16:28:01.415319Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb74680] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2025-10-24T16:28:01.415339Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fadf480] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2025-10-24T16:28:01.415363Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb59980] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2025-10-24T16:28:01.415371Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae2200] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2025-10-24T16:28:01.415403Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb45b00] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2025-10-24T16:28:01.415409Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae6980] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2025-10-24T16:28:01.415432Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3ad180] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2025-10-24T16:28:01.415447Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d38009c00] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2025-10-24T16:28:01.415463Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb5cd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2025-10-24T16:28:01.415487Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3a3580] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2025-10-24T16:28:01.415495Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3faf3980] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2025-10-24T16:28:01.415531Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb43400] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2025-10-24T16:28:01.415530Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb47500] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2025-10-24T16:28:01.415569Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3b7400] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2025-10-24T16:28:01.415574Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3acb00] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2025-10-24T16:28:01.415610Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae7000] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2025-10-24T16:28:01.415631Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d38002700] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 
2025-10-24T16:28:01.415640Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb54b80] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2025-10-24T16:28:01.415667Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3faff600] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2025-10-24T16:28:01.415673Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fad7f80] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2025-10-24T16:28:01.415705Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb42080] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2025-10-24T16:28:01.415723Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3faf2c80] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2025-10-24T16:28:01.415742Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3ab100] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2025-10-24T16:28:01.415769Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb51780] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2025-10-24T16:28:01.415775Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b389580] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2025-10-24T16:28:01.415799Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3faf3300] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2025-10-24T16:28:01.415817Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d38002d80] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2025-10-24T16:28:01.415828Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fad7900] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2025-10-24T16:28:01.415883Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3ba800] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2025-10-24T16:28:01.415935Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3800d680] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2025-10-24T16:28:01.415987Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae3580] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2025-10-24T16:28:01.416031Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fafd580] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2025-10-24T16:28:01.416081Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b387b80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2025-10-24T16:28:01.416134Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb5d400] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2025-10-24T16:28:01.416182Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fafcf00] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2025-10-24T16:28:01.416231Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb5b380] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2025-10-24T16:28:01.416266Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb40000] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 
2025-10-24T16:28:01.416304Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fadd400] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2025-10-24T16:28:01.416338Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3fae0180] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2025-10-24T16:28:01.416380Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3cb62f00] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2025-10-24T16:28:01.416419Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3b3be280] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2025-10-24T16:28:01.416465Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x704d3faf6080] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 16478, MsgBus: 16729 2025-10-24T16:27:56.891150Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825868659985711:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:56.891200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002095/r3tmp/tmpcns0Dr/pdisk_1.dat 2025-10-24T16:27:56.920992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:56.930152Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:56.930284Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825868659985608:2081] 1761323276890484 != 1761323276890487 TServer::EnableGrpc on GrpcPort 16478, node 1 2025-10-24T16:27:56.937224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:56.937238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:56.937240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:56.937288Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16729 TClient is connected to server localhost:16729 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:56.998729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:56.998767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:56.999881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:57.001516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:57.015829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:57.035043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:57.056867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:57.066861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:57.123293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:57.245282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825872954954549:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.245306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.245419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825872954954559:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.245431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.306498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.314935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.324554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.337704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.351368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.366042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.379836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.393929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.410937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825872954955426:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.410970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.411046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825872954955431:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.411068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825872954955432:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.411079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.411917Z node 1 :FLAT_TX_SCHEMESHARD ... 394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.021871Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.033142Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.044981Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.055201Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.068792Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.085801Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.098235Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.114566Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.127637Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.147927Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825893461709504:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.147981Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.148238Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825893461709509:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.148249Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825893461709510:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.148256Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.149252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.152189Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:28:01.152282Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825893461709513:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:01.228941Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825893461709565:3554] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:01.526030Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323281564, txId: 281474976715673] shutting down {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[0,36],"Min":36}},"Name":"4","Push":{"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":424,"Sum":424,"History":[0,424],"Min":424},"Chunks":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3}}}],"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[0,1048576],"Min":1048576},"Introspections":["1 tasks for a single\/sequential source scan"],"Tasks":1,"FinishedTasks":1,"OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"IngressRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1761323281522,"Table":[{"Path":"\/Root\/EightShard","ReadBytes":{"Count":1,"Max":54,"Sum":54,"Min":54},"ReadRows":{"Count":1,"Max":3,"Sum":3,"Min":3}}],"CpuTimeUs":{"Count":1,"Max":297,"Sum":297,"History":[0,297],"Min":297},"OutputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[0,144],"Min":144}},"External":{},"Ingress":{},"Name":"KqpReadRangesSource","Push":{"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":394,"Sum":394,"History":[0,394],"Min":394},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[0,144],"Min":144}}}]}}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Introspections":["1 minimum tasks for 
compute"],"DurationUs":{"Count":1,"Max":1000,"Sum":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[0,1048576,1,1048576],"Min":1048576},"BaseTimeMs":1761323281522,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[1,36],"Min":36}},"Name":"RESULT","Push":{"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":629,"Sum":629,"History":[1,629],"Min":629},"Chunks":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3}}}],"CpuTimeUs":{"Count":1,"Max":207,"Sum":207,"History":[0,170,1,207],"Min":207},"StageDurationUs":1000,"ResultRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResultBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"OutputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[1,36],"Min":36}},"Name":"2","Push":{"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":609,"Sum":609,"History":[1,609],"Min":609},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[1,36],"Min":36}}}],"Tasks":1,"InputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"UpdateTimeMs":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":37277,"ProcessCpuTimeUs":108,"Compilation":{"FromCache":false,"CpuTimeUs":29872,"DurationUs":30706}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-Rows":3,"A-SelfCpu":0.207,"A-Size":36,"A-Cpu":0.504,"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":3,"A-SelfCpu":0.297,"A-Size":36,"A-Cpu":0.297,"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","ReadRange":["Key [150, 266]"],"E-Cost":"0","ReadColumns":["Data","Key","Text"],"Name":"TableRangeScan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} 2025-10-24T16:28:01.562488Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> 
THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryReplySize >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> KqpParams::InvalidJson [GOOD] |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |61.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpParams::MissingParameter [GOOD] >> KqpParams::MissingOptionalParameter+UseSink >> KqpStats::CreateTableAsStats+IsOlap [GOOD] >> KqpStats::CreateTableAsStats-IsOlap >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::UpdateThenDelete+UseSink >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 20666, MsgBus: 19289 2025-10-24T16:27:58.149799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825878039705002:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:58.149866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:27:58.152572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00208f/r3tmp/tmptqKe7k/pdisk_1.dat 2025-10-24T16:27:58.182386Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:58.182581Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825878039704891:2081] 1761323278149008 != 1761323278149011 TServer::EnableGrpc on GrpcPort 20666, node 1 2025-10-24T16:27:58.194541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:58.194553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:58.194555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:58.194595Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19289 TClient is connected to server localhost:19289 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:27:58.254361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:58.254390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:58.255334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:58.267045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:58.278501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:58.281320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:27:58.293083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.312712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.335532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:58.346985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.518711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878039706542:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.518744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.518817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878039706552:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.518828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.572398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.581020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.591204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.604963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.619253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.633020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.646764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.661320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.678325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825878039707415:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.678355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.678365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878039707420:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.678391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878039707422:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.678398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, st ... Service] [TPoolFetcherActor] ActorId: [3:7564825890063121265:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.027665Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825890063121255:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.027675Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.027860Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825890063121294:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.027878Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.028530Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.030955Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564825890063121293:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-10-24T16:28:01.121735Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564825890063121346:2348] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:01.126338Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.179599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.224014Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:28:01.228370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 1384, MsgBus: 12175 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00208f/r3tmp/tmpJvFSwS/pdisk_1.dat 2025-10-24T16:28:01.549160Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:01.549196Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:01.560921Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:01.563164Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:7564825890264403225:2081] 1761323281545059 != 1761323281545062 2025-10-24T16:28:01.565110Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:01.565140Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:01.565882Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 1384, node 4 2025-10-24T16:28:01.572910Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:01.572920Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:01.572921Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:01.572971Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12175 TClient is connected to server localhost:12175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:01.622312Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.764401Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:01.993725Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825890264403882:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.993760Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.993863Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825890264403907:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.993874Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.993882Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825890264403908:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.994826Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.997246Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825890264403911:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:02.086704Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825894559371258:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:02.091728Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.145193Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [4:7564825894559371397:2343], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-10-24T16:28:02.146093Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=4&id=ZGM0N2MyNTYtNDU5Njk5N2MtYTVmNjc5YzgtZjAzOTE3MzM=, ActorId: [4:7564825894559371395:2342], ActorState: ExecuteState, TraceId: 01k8bgrvpw82vz0ze3ft4da7tp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 27848, MsgBus: 20757 2025-10-24T16:27:56.622081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825872122979538:2152];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:56.622156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002098/r3tmp/tmpMUawOQ/pdisk_1.dat 2025-10-24T16:27:56.667665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:56.680390Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:56.683990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825872122979418:2081] 1761323276621265 != 1761323276621268 TServer::EnableGrpc on GrpcPort 27848, node 1 2025-10-24T16:27:56.694998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:56.695013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:56.695015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:56.695062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20757 2025-10-24T16:27:56.726067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:56.726101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:56.727121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20757 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:56.744211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:56.746112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:27:56.754411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.777522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.803219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:27:56.817127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.936629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:56.969308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825872122981067:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.969335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.969432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825872122981077:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.969442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.028738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.036845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.050881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.064362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.078815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.093269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.106565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.120941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:57.136698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825876417949236:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.136717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.136744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825876417949241:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.136758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825876417949242:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:57.136766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... server localhost:23034 TClient is connected to server localhost:23034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:01.569756Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.667542Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:01.700254Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.714926Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.742244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:01.757168Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.850212Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825891671973976:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.850236Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.850285Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825891671973986:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.850290Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.861754Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.869965Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.880828Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.895173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.911288Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.925413Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.937542Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.952597Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.975861Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825891671974849:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.975891Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.976007Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825891671974854:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.976016Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825891671974855:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.976087Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.977020Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.986978Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825891671974858:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:02.045926Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825895966942206:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:02.304866Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.329352Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=MTVkZTkxOWEtN2M5YTk0MmQtN2QyZjY2MzItMjdlZjU1YzE=, ActorId: [5:7564825895966942501:2521], ActorState: ExecuteState, TraceId: 01k8bgrvwc167cr4327n9wbswb, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1223: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1223: Invalid Json value 2025-10-24T16:28:02.471508Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> KqpLimits::BigParameter [GOOD] >> KqpLimits::AffectedShardsLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11445, MsgBus: 2581 2025-10-24T16:27:55.837340Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825863889170770:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:55.837355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00209e/r3tmp/tmpjAGkLX/pdisk_1.dat 2025-10-24T16:27:55.863832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:55.874214Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:55.874375Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825863889170739:2081] 1761323275837082 != 1761323275837085 TServer::EnableGrpc on GrpcPort 11445, node 1 2025-10-24T16:27:55.884815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:55.884828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:55.884830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:55.884877Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2581 TClient is connected to server localhost:2581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:55.940388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:55.940419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:55.941393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:55.946268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:55.954286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.972043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.993538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.004515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.078682Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:56.166484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868184139682:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.166505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.166563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868184139692:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.166570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.206688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.213741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.223662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.230794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.238413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.252325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.259205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.273421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.289245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825868184140554:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.289271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.289276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868184140559:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.289313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868184140561:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.289324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.289955Z node 1 :FLAT_TX_SCHEMESHARD WA ... at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.557603Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:01.709847Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825893349300291:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.709873Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.709936Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825893349300300:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.709949Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.722152Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.734075Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.742297Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.755616Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.768735Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.783235Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.796956Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.811635Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.829383Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825893349301162:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.829412Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.829444Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825893349301167:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.829474Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825893349301168:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.829502Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.830367Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.837995Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825893349301171:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:01.909602Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825893349301223:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:02.186401Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.342182Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:02.356522Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [5:7564825897644269023:2568], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-10-24T16:28:02.356647Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=5&id=Y2NiNWY2OGYtYzc4ZDIwMjUtODg3NzFlY2MtZGE0NTRkZDM=, ActorId: [5:7564825897644269021:2567], ActorState: ExecuteState, TraceId: 01k8bgrvxg7ceaebr522n9y7pz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:02.375347Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=OTNjZmMzZGMtYzAzOTVkNTQtNmM0NDIxOWItZjc1MWZkMzE=, ActorId: [5:7564825897644269027:2570], ActorState: ExecuteState, TraceId: 01k8bgrvxn167cfdcxpg3yq9h3, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1473: ydb/core/kqp/query_data/kqp_query_data.cpp:259: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-10-24T16:28:02.381688Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [5:7564825897644269042:2576], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-10-24T16:28:02.381797Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=5&id=NDAzMzE5ZWYtYzRiZjQ4ZmItYzcwYzE0YTctZmY0YzRlMzI=, ActorId: [5:7564825897644269040:2575], ActorState: ExecuteState, TraceId: 01k8bgrvy8170vhqwmveq1fejz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:02.387778Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [5:7564825897644269053:2581], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-10-24T16:28:02.387912Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=5&id=ZGY0MzU4ZjItZjZiZmYyYmYtODIxOWZmMC1jZTI5NTM1OQ==, ActorId: [5:7564825897644269051:2580], ActorState: ExecuteState, TraceId: 01k8bgrvye14mbzxyvwh6v8pb9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> TKesusTest::TestAcquireUpgrade >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::Pure >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> KqpStats::CreateTableAsStats-IsOlap [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> TKesusTest::TestQuoterResourceDescribe >> KqpQuery::DeleteWhereInSubquery [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-10-24T16:28:02.213124Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:02.213160Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:02.220288Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:02.220358Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:02.248163Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:02.248345Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=6099372307710914188, session=0, seqNo=0) 2025-10-24T16:28:02.248387Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:02.261296Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=6099372307710914188, session=1) 2025-10-24T16:28:02.261434Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=3179676670179850002, session=0, seqNo=0) 2025-10-24T16:28:02.261476Z node 1 :KESUS_TABLET 
DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:02.273275Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=3179676670179850002, session=2) 2025-10-24T16:28:02.273399Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=111, session=1, semaphore="Lock1" count=1) 2025-10-24T16:28:02.273459Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:02.273477Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:02.284570Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=111) 2025-10-24T16:28:02.284700Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:02.284798Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:02.284815Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-10-24T16:28:02.295693Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=222) 2025-10-24T16:28:02.295729Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=333) 2025-10-24T16:28:02.295909Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=828106694494601033, name="Lock1") 2025-10-24T16:28:02.295944Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=828106694494601033) 2025-10-24T16:28:02.432820Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:02.432864Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:02.437389Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:02.437485Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:02.469525Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:02.469713Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=1862529255044009817, session=0, seqNo=0) 2025-10-24T16:28:02.469755Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:02.480711Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=1862529255044009817, session=1) 2025-10-24T16:28:02.480830Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=17730849496083661722, session=0, seqNo=0) 
2025-10-24T16:28:02.480871Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:02.491832Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=17730849496083661722, session=2) 2025-10-24T16:28:02.491986Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:02.492038Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:02.492058Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:02.503094Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=111) 2025-10-24T16:28:02.503194Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:02.503272Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:02.514688Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=222) 2025-10-24T16:28:02.514742Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=333) 2025-10-24T16:28:02.514972Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2176], cookie=1816853777013214555, name="Lock1") 2025-10-24T16:28:02.515016Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2176], cookie=1816853777013214555) 2025-10-24T16:28:02.515142Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2179], cookie=12657193698597433599, name="Lock1") 2025-10-24T16:28:02.515158Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2179], cookie=12657193698597433599) 2025-10-24T16:28:02.701315Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:02.701359Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:02.709247Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:02.709312Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:02.730915Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:02.731103Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=10917700534584278729, session=0, seqNo=0) 2025-10-24T16:28:02.731143Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:02.742094Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: 
[72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=10917700534584278729, session=1) 2025-10-24T16:28:02.742201Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=8161618119754042663, session=0, seqNo=0) 2025-10-24T16:28:02.742241Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:02.753213Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=8161618119754042663, session=2) 2025-10-24T16:28:02.753427Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:02.753472Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:02.753491Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:02.765104Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=111) 2025-10-24T16:28:02.765221Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:02.765330Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:02.765345Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-10-24T16:28:02.776345Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=222) 2025-10-24T16:28:02.776382Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=333) 2025-10-24T16:28:02.776547Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:157:2179], cookie=490992145292796980, name="Lock1") 2025-10-24T16:28:02.776573Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:157:2179], cookie=490992145292796980) 2025-10-24T16:28:02.776635Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:160:2182], cookie=9602633970116572166, name="Lock1") 2025-10-24T16:28:02.776642Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:160:2182], cookie=9602633970116572166) 2025-10-24T16:28:02.780609Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:02.780647Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:02.780708Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:02.780966Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:02.828324Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: 
[72057594037927937] TTxInit::Complete 2025-10-24T16:28:02.828420Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:02.828557Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:199:2212], cookie=14799234636507492491, name="Lock1") 2025-10-24T16:28:02.828583Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:199:2212], cookie=14799234636507492491) 2025-10-24T16:28:02.828699Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:207:2219], cookie=5001582885480266603, name="Lock1") 2025-10-24T16:28:02.828708Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:207:2219], cookie=5001582885480266603) 2025-10-24T16:28:02.954330Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:02.954375Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:02.966159Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:02.966755Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:02.992481Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:02.992691Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=15381878922221980475, session=0, seqNo=0) 2025-10-24T16:28:02.992742Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:03.004746Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=15381878922221980475, session=1) 2025-10-24T16:28:03.004879Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=504881033002143961, session=0, seqNo=0) 2025-10-24T16:28:03.004923Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:03.016123Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=504881033002143961, session=2) 2025-10-24T16:28:03.016255Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:03.016310Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:03.016330Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:03.028357Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=111) 2025-10-24T16:28:03.028475Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:03.028590Z node 4 :KESUS_TABLET DEBUG: 
tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:138:2162], cookie=333, name="Lock1") 2025-10-24T16:28:03.028605Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-10-24T16:28:03.042407Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=222) 2025-10-24T16:28:03.042450Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:138:2162], cookie=333) 2025-10-24T16:28:03.194159Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:03.194204Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:03.198651Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:03.198850Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:03.231036Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:03.232650Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=6001129006419292347, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-10-24T16:28:03.232729Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:03.243760Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=6001129006419292347) 2025-10-24T16:28:03.243954Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=9319370581006333319, path="/Root/Res", config={ }) 2025-10-24T16:28:03.244026Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-10-24T16:28:03.255084Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=9319370581006333319) 2025-10-24T16:28:03.255571Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 4844483762481199019. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:03.255608Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=4844483762481199019) 2025-10-24T16:28:03.255702Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:152:2174]. Cookie: 7953559894080153942. 
Data: { } 2025-10-24T16:28:03.255710Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:152:2174], cookie=7953559894080153942) 2025-10-24T16:28:03.296727Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:03.348035Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:03.380440Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:03.421453Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:03.462735Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } |61.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-10-24T16:27:23.279245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.313741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:23.313837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:23.313876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a19/r3tmp/tmpPlqizh/pdisk_1.dat 2025-10-24T16:27:23.450859Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:23.483741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.483770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.519764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10943, node 1 2025-10-24T16:27:23.606168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:23.606191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:23.606196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:23.606373Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:23.609540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:23.645145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2331 2025-10-24T16:27:24.000184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:24.812054Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:24.814637Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:24.815553Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:24.832895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:24.832926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:24.856991Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:24.857479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:24.943516Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:24.943578Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:24.955672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:25.018884Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.040370Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040585Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040747Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040860Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040876Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040899Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040945Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040967Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.040989Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:25.137975Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:25.149764Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:25.149802Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:25.173655Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:25.174987Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:25.175023Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:25.175030Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:25.175037Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:25.175044Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:25.175051Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:25.175059Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:25.175452Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:25.175695Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:25.176097Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:25.179502Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:25.179518Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:25.179531Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:25.190440Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:25.191368Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:25.198908Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:25.198947Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:25.204996Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:25.205068Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:25.282127Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:25.294235Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:25.433525Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:25.435229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:25.436993Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:25.437046Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:25.534686Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:25.720649Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:26.317667Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:26.366940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2224:3059], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:26.366976Z n ... -24T16:27:59.769945Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-10-24T16:28:00.826479Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:28:00.826554Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 1 2025-10-24T16:28:00.826611Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-10-24T16:28:00.847902Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:28:00.847936Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-10-24T16:28:00.847941Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:28:00.848160Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:28:00.859697Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:28:00.859840Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:28:00.859871Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:28:00.860042Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:28:00.883093Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:00.883172Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:28:00.883425Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4867:4253], server id = [2:4868:4254], tablet id = 72075186224037900, status = OK 2025-10-24T16:28:00.883463Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4867:4253], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-10-24T16:28:00.884884Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:28:00.884906Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:00.884946Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:00.884983Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:00.885067Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4872:4257], ActorId: [2:4873:4258], Starting query actor #1 [2:4874:4259] 2025-10-24T16:28:00.885080Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4873:4258], ActorId: [2:4874:4259], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:28:00.885757Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4867:4253], server id = [2:4868:4254], tablet id = 72075186224037900 2025-10-24T16:28:00.885771Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:00.885971Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4873:4258], ActorId: [2:4874:4259], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZmI2YjY2MzktM2U0MTBmMDQtM2NjNDY3MC01NTY0YzY0Mg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:00.923258Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4873:4258], ActorId: [2:4874:4259], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmI2YjY2MzktM2U0MTBmMDQtM2NjNDY3MC01NTY0YzY0Mg==, TxId: 2025-10-24T16:28:00.923291Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4873:4258], ActorId: [2:4874:4259], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmI2YjY2MzktM2U0MTBmMDQtM2NjNDY3MC01NTY0YzY0Mg==, TxId: 2025-10-24T16:28:00.923403Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4872:4257], ActorId: [2:4873:4258], Got response [2:4874:4259] SUCCESS 2025-10-24T16:28:00.923510Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:00.935504Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:28:00.935527Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:28:00.956474Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:28:00.956517Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:28:00.956616Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 77, entries count: 3, are all stats full: 1 2025-10-24T16:28:00.974063Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:28:01.546334Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 3 is different from the current 0 2025-10-24T16:28:01.546374Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:28:02.020454Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:28:02.020491Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:28:02.020497Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. 
All the force traversal operations sent the requests. 2025-10-24T16:28:03.055640Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:28:03.055716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-10-24T16:28:03.055725Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:28:03.056012Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:28:03.068280Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:28:03.068487Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:28:03.068514Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:28:03.068687Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:28:03.095050Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:03.095125Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-10-24T16:28:03.095320Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4962:4305], server id = [2:4963:4306], tablet id = 72075186224037900, status = OK 2025-10-24T16:28:03.095352Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4962:4305], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-10-24T16:28:03.095692Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-10-24T16:28:03.095703Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:03.095737Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:03.095760Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:03.095816Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4965:4308], ActorId: [2:4966:4309], Starting query actor #1 [2:4967:4310] 2025-10-24T16:28:03.095826Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4966:4309], ActorId: [2:4967:4310], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:28:03.096511Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4962:4305], server id = [2:4963:4306], tablet id = 72075186224037900 2025-10-24T16:28:03.096525Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:03.096714Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4966:4309], ActorId: [2:4967:4310], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZmM5MzBiNmItNDVjNDViNGUtN2MwODI5NjgtOTQ2MjY1ZGY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:03.116034Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4966:4309], ActorId: [2:4967:4310], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmM5MzBiNmItNDVjNDViNGUtN2MwODI5NjgtOTQ2MjY1ZGY=, TxId: 2025-10-24T16:28:03.116065Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4966:4309], ActorId: [2:4967:4310], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmM5MzBiNmItNDVjNDViNGUtN2MwODI5NjgtOTQ2MjY1ZGY=, TxId: 2025-10-24T16:28:03.116162Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4965:4308], ActorId: [2:4966:4309], Got response [2:4967:4310] SUCCESS 2025-10-24T16:28:03.116271Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:03.130299Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-10-24T16:28:03.130334Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3863:3599] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> KqpQuery::QueryCacheTtl [GOOD] >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] >> KqpParams::MissingOptionalParameter+UseSink [GOOD] >> KqpQuery::UpdateThenDelete+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpExplain::IdxFullscan [GOOD] >> KqpLimits::QueryReplySize [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn >> KqpExplain::MultiJoinCteLinks >> KqpLimits::CancelAfterRoTx >> KqpQuery::Pure [GOOD] >> KqpQuery::DictJoin >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> KqpStats::DeferredEffects-UseSink >> KqpQuery::QueryClientTimeout >> KqpParams::MissingOptionalParameter-UseSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink [GOOD] >> KqpQuery::QueryResultsTruncated >> TKesusTest::TestQuoterResourceCreation >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> KqpLimits::QueryExecTimeoutCancel >> KqpStats::JoinStatsBasicScan >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink |61.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |61.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |61.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |61.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |61.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |61.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |61.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn [GOOD] >> KqpLimits::StreamWrite-Allowed >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-10-24T16:27:25.124675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: 
ActorUnknown 2025-10-24T16:27:25.161588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:25.164551Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:25.164666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:25.164686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a0a/r3tmp/tmp28pUtT/pdisk_1.dat 2025-10-24T16:27:25.284091Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:25.316700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:25.316737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:25.349043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62799, node 1 2025-10-24T16:27:25.446277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:25.446305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:25.446310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:25.446416Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:25.450745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:25.477555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20787 2025-10-24T16:27:25.955812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:26.812671Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:26.814706Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:26.831047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:26.831104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:26.869444Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:26.872228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:26.969209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:26.969276Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:26.969699Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.969910Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970043Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970324Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970356Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970389Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970424Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.970475Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:26.996853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:27.061311Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:27.078182Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:27.078229Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:27.106201Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:27.106542Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:27.106576Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:27.106582Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:27.106589Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:27.106597Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:27.106604Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:27.106612Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:27.107567Z node 2 
:STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:27.117271Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:27.117305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1836:2589], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:27.118451Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1845:2595] 2025-10-24T16:27:27.118486Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1845:2595], schemeshard id = 72075186224037897 2025-10-24T16:27:27.134862Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1907:2620] 2025-10-24T16:27:27.136163Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-10-24T16:27:27.139414Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:27.139433Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:27.139447Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-10-24T16:27:27.150271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:27.165522Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:27.165578Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:27.223584Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:27.355689Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:27.379518Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-10-24T16:27:27.467436Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:27.594879Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:28.293540Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:28.316379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:28.872426Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:28.887411Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-10-24T16:27:28.887454Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8270: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-10-24T16:27:28.887469Z node 2 :STATI ... 72075186224037899, LocalPathId: 2] is column table. 2025-10-24T16:28:01.758444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-10-24T16:28:01.758719Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:28:01.770346Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:28:01.770390Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:28:01.770560Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:28:01.770581Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:28:01.770777Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:21: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-10-24T16:28:01.770806Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:52: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-10-24T16:28:01.770926Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:28:01.794229Z node 2 :STATISTICS DEBUG: tx_analyze_table_response.cpp:57: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-10-24T16:28:01.794263Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:01.794336Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:28:01.794480Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4589:4006], server id = [2:4590:4007], tablet id = 72075186224037905, status = OK 2025-10-24T16:28:01.794520Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4589:4006], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-10-24T16:28:01.795449Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-10-24T16:28:01.795469Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:01.795524Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:01.795555Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:01.795635Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4594:4010], ActorId: [2:4595:4011], Starting query actor #1 [2:4596:4012] 2025-10-24T16:28:01.795647Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4595:4011], ActorId: [2:4596:4012], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2025-10-24T16:28:01.796308Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4589:4006], server id = [2:4590:4007], tablet id = 72075186224037905 2025-10-24T16:28:01.796320Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:01.796492Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4595:4011], ActorId: [2:4596:4012], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Yjc3YWJkZTgtNDg0Zjk5Y2YtMjU2MTkyZTMtZDA5OWM4YjA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:01.803956Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4605:4021]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:28:01.804052Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:28:01.804059Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4605:4021], StatRequests.size() = 1 2025-10-24T16:28:01.834155Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4595:4011], ActorId: [2:4596:4012], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Yjc3YWJkZTgtNDg0Zjk5Y2YtMjU2MTkyZTMtZDA5OWM4YjA=, TxId: 2025-10-24T16:28:01.834192Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4595:4011], ActorId: [2:4596:4012], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=Yjc3YWJkZTgtNDg0Zjk5Y2YtMjU2MTkyZTMtZDA5OWM4YjA=, TxId: 2025-10-24T16:28:01.834305Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4594:4010], ActorId: [2:4595:4011], Got response [2:4596:4012] SUCCESS 2025-10-24T16:28:01.834527Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:01.846025Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-10-24T16:28:01.846051Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:28:02.306716Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 2 is different from the current 0 2025-10-24T16:28:02.306753Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:28:02.743785Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:28:02.743828Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:28:02.743835Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:28:03.665980Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:28:03.666071Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 1 2025-10-24T16:28:03.666127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-10-24T16:28:03.690060Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:28:03.690136Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-10-24T16:28:03.690143Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-10-24T16:28:03.690410Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:28:03.702041Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:28:03.702192Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:28:03.702247Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:28:03.702381Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:28:03.725354Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:03.725460Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-10-24T16:28:03.725655Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4703:4069], server id = [2:4704:4070], tablet id = 72075186224037905, status = OK 2025-10-24T16:28:03.725695Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4703:4069], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-10-24T16:28:03.726154Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-10-24T16:28:03.726171Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:03.726220Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4703:4069], server id = [2:4704:4070], tablet id = 72075186224037905 2025-10-24T16:28:03.726225Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:03.726272Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:03.726322Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:03.726414Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4706:4072], ActorId: [2:4707:4073], Starting query actor #1 [2:4708:4074] 2025-10-24T16:28:03.726429Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4707:4073], ActorId: [2:4708:4074], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2025-10-24T16:28:03.727303Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4707:4073], ActorId: [2:4708:4074], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjcxNGUwNzQtMmMyN2RiYTUtNzRlZWRlNjEtMzI5OTI3MGE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:03.743665Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4707:4073], ActorId: [2:4708:4074], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjcxNGUwNzQtMmMyN2RiYTUtNzRlZWRlNjEtMzI5OTI3MGE=, TxId: 2025-10-24T16:28:03.743708Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4707:4073], ActorId: [2:4708:4074], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjcxNGUwNzQtMmMyN2RiYTUtNzRlZWRlNjEtMzI5OTI3MGE=, TxId: 2025-10-24T16:28:03.743833Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4706:4072], ActorId: [2:4707:4073], Got response [2:4708:4074] SUCCESS 2025-10-24T16:28:03.743971Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:03.756229Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-10-24T16:28:03.756269Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3562:3495] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4608, MsgBus: 18973 2025-10-24T16:27:58.121330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825876799009089:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:58.121363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002092/r3tmp/tmpdkljoc/pdisk_1.dat 2025-10-24T16:27:58.162112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:58.174586Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825876799009061:2081] 1761323278120990 != 1761323278120993 2025-10-24T16:27:58.177973Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4608, node 1 2025-10-24T16:27:58.183290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:58.183301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:58.183302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:58.183337Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18973 2025-10-24T16:27:58.228325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:58.228366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:58.229400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:58.243768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:58.274443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.292491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.313833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.325373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.436402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:58.514633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825876799010707:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.514669Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.514754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825876799010717:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.514773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.569082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.595754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.606746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.618420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.632992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.646596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.659964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.675140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.691426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825876799011579:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.691467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.691476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825876799011584:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.691500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825876799011586:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.691513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.692228Z node 1 :FLAT_TX_SCHEMESHARD W ... cation cookie mismatch for subscription [4:7564825900186719747:2081] 1761323283119834 != 1761323283119837 TServer::EnableGrpc on GrpcPort 6156, node 4 2025-10-24T16:28:03.150372Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:03.150388Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:03.150390Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:03.150447Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13282 TClient is connected to server localhost:13282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:03.212853Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:03.216093Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:03.221891Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:03.233997Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:03.256136Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:03.269523Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:03.336338Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:03.581231Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825900186721401:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.581253Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.581405Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825900186721411:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.581414Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.594370Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.651713Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.665945Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.680527Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.694845Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.709805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.722419Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.789477Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.818011Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825900186722277:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.818042Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.818205Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825900186722283:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.818225Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825900186722282:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.818235Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.818926Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:03.821871Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825900186722286:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:03.896214Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825900186722338:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:04.125873Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; [] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-10-24T16:28:04.004452Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:04.004506Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:04.010469Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:04.010538Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:04.033484Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:04.033752Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=1617327050463887114, session=0, seqNo=0) 2025-10-24T16:28:04.033813Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:04.044796Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=1617327050463887114, session=1) 2025-10-24T16:28:04.044908Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:139:2163], cookie=5690458454480854214, session=0, seqNo=0) 2025-10-24T16:28:04.044948Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:04.056041Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:139:2163], cookie=5690458454480854214, session=2) 2025-10-24T16:28:04.056384Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:04.056456Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:04.056488Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:04.067383Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=111) 2025-10-24T16:28:04.067502Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=112, session=1, semaphore="Lock2" count=1) 2025-10-24T16:28:04.067545Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 
2025-10-24T16:28:04.067573Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-10-24T16:28:04.078362Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=112) 2025-10-24T16:28:04.078462Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:138:2162], cookie=333, name="Lock1") 2025-10-24T16:28:04.078489Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-10-24T16:28:04.078530Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:139:2163], cookie=222, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:04.078545Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-10-24T16:28:04.078554Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-10-24T16:28:04.078566Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:139:2163], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-10-24T16:28:04.089479Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:138:2162], cookie=333) 2025-10-24T16:28:04.089517Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:139:2163], cookie=222) 2025-10-24T16:28:04.089526Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:139:2163], cookie=223) 2025-10-24T16:28:04.089600Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:138:2162], cookie=334, name="Lock2") 2025-10-24T16:28:04.089635Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-10-24T16:28:04.089648Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-10-24T16:28:04.100521Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:138:2162], cookie=334) 2025-10-24T16:28:04.100690Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:167:2189], cookie=4275388135706499711, name="Lock1") 2025-10-24T16:28:04.100720Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:167:2189], cookie=4275388135706499711) 2025-10-24T16:28:04.100772Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:170:2192], cookie=14437416822151962059, name="Lock2") 2025-10-24T16:28:04.100777Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:170:2192], cookie=14437416822151962059) 2025-10-24T16:28:04.103530Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:04.103566Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] 
TTxInitSchema::Execute 2025-10-24T16:28:04.103653Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:04.103717Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:04.148180Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:04.148248Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-10-24T16:28:04.148258Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-10-24T16:28:04.148387Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:209:2222], cookie=16255794310216377877, name="Lock1") 2025-10-24T16:28:04.148417Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:209:2222], cookie=16255794310216377877) 2025-10-24T16:28:04.148533Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:217:2229], cookie=9211801345801319047, name="Lock2") 2025-10-24T16:28:04.148540Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:217:2229], cookie=9211801345801319047) 2025-10-24T16:28:04.257438Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:04.257486Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:04.262151Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:04.262246Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:04.294348Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:04.294597Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=12179475042782791785, session=0, seqNo=0) 2025-10-24T16:28:04.294638Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:04.305572Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=12179475042782791785, session=1) 2025-10-24T16:28:04.305682Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2163], cookie=14170569986742808441, session=0, seqNo=0) 2025-10-24T16:28:04.305734Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:04.316664Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2163], cookie=14170569986742808441, session=2) 2025-10-24T16:28:04.316940Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:04.316998Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:04.317017Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing 
semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:04.327960Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=111) 2025-10-24T16:28:04.328097Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=112, session=1, semaphore="Lock2" count=1) 2025-10-24T16:28:04.328150Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-10-24T16:28:04.328170Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-10-24T16:28:04.339128Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=112) 2025-10-24T16:28:04.339261Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=333, session=1, semaphore="Lock1" count=1) 2025-10-24T16:28:04.339350Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=222, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:04.339373Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-10-24T16:28:04.339392Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-10-24T16:28:04.350371Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=333) 2025-10-24T16:28:04.350406Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=222) 2025-10-24T16:28:04.350412Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=223) 2025-10-24T16:28:04.350554Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2187], cookie=365079743400044731, name="Lock1") 2025-10-24T16:28:04.350577Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2187], cookie=365079743400044731) 2025-10-24T16:28:04.350638Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2190], cookie=3282091435220523520, name="Lock2") 2025-10-24T16:28:04.350645Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2190], cookie=3282091435220523520) 2025-10-24T16:28:04.350702Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2193], cookie=8305597476758251723, name="Lock1") 2025-10-24T16:28:04.350708Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2193], cookie=8305597476758251723) 2025-10-24T16:28:04.350753Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2196], cookie=8181601192045259484, 
name="Lock2") 2025-10-24T16:28:04.350759Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2196], cookie=8181601192045259484) 2025-10-24T16:28:04.350793Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=444, session=2, semaphore="Lock2" count=1) 2025-10-24T16:28:04.350828Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-10-24T16:28:04.362842Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=444) 2025-10-24T16:28:04.363074Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:179:2201], cookie=6474890446516693924, name="Lock2") 2025-10-24T16:28:04.363107Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:179:2201], cookie=6474890446516693924) 2025-10-24T16:28:04.363187Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:182:2204], cookie=5411389798565550512, name="Lock2") 2025-10-24T16:28:04.363194Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:182:2204], cookie=5411389798565550512) 2025-10-24T16:28:04.366653Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:04.366692Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:04.366753Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:04.366911Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:04.409688Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:04.409748Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:04.409757Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-10-24T16:28:04.409762Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-10-24T16:28:04.409767Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-10-24T16:28:04.409890Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:221:2234], cookie=13613237447287777010, name="Lock1") 2025-10-24T16:28:04.409915Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:221:2234], cookie=13613237447287777010) 2025-10-24T16:28:04.410041Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:229:2241], cookie=11277539540966864489, name="Lock2") 2025-10-24T16:28:04.410048Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:229:2241], cookie=11277539540966864489) >> KqpQuery::DictJoin [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryExplain >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 64733, MsgBus: 29331 2025-10-24T16:27:59.182841Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825881418540869:2192];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:59.182888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:27:59.189139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002083/r3tmp/tmpOHEYo3/pdisk_1.dat 2025-10-24T16:27:59.255823Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64733, node 1 2025-10-24T16:27:59.264555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:59.264563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.264564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.264604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29331 2025-10-24T16:27:59.285833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.285870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.286739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:27:59.323894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.335430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.338395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:27:59.661726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881418541373:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.661753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.661927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881418541383:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.661936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.709533Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825881418541396:2306] txid# 281474976710658, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0PK\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2025-10-24T16:27:59.714538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881418541404:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.714565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.714605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881418541406:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.714611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.719090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.747351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881418541499:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.747378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.747492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881418541503:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.747508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.749138Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825881418541513:2376] txid# 281474976710660, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } Trying to start YDB, gRPC: 14724, MsgBus: 4323 2025-10-24T16:28:00.109665Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564825886300238312:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.109695Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002083/r3tmp/tmpAKMOcN/pdisk_1.dat 2025-10-24T16:28:00.112733Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:00.125685Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:00.125902Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564825886300238270:2081] 1761323280109383 != 1761323280109386 2025-10-24T16:28:00.128555Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:00.128581Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:00.129708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14724, node 2 2025-10-24T16:28:00.144726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:00.144743Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:00.144746Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:00.144800Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4323 TClient is connected to server localhost:4323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ... 
} DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.722822Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.722922Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.722961Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723084Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723113Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723243Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723247Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 
4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723390Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723399Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037971;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723531Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723689Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.723833Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724079Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724242Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724413Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724583Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724607Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724768Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724840Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.724939Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } 
NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-10-24T16:28:03.725099Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 31819, MsgBus: 14457 2025-10-24T16:27:58.120500Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825878328019818:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:58.120520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:27:58.123823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00208e/r3tmp/tmp6TRW66/pdisk_1.dat 2025-10-24T16:27:58.145194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:58.145221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:58.146213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:58.157874Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:58.158072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825878328019791:2081] 1761323278120075 != 1761323278120078 TServer::EnableGrpc on GrpcPort 31819, node 1 2025-10-24T16:27:58.171783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:58.171799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:58.171802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:58.171870Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14457 TClient is connected to server localhost:14457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:27:58.250403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:58.260647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:58.282193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.302057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.324710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:58.338047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:58.606389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878328021439:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.606423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.606510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878328021449:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.606522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.673038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.681677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.695584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.709840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.723655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.737570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.751844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.766141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:58.782304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825878328022312:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.782327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878328022317:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.782342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.782384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825878328022320:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.782395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:58.783104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__o ... meUs":{"Count":1,"Sum":2000,"Max":2000,"Min":2000}},"Push":{"WaitTimeUs":{"Count":1,"Sum":252,"Max":252,"Min":252,"History":[2,252]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":28,"Max":28,"Min":28,"History":[2,28]}}}],"UseLlvm":"undefined","Table":[{"Path":"\/Root\/.tmp\/sessions\/27795b42-4f33-c4f0-6a83-cc9a9ed98d83\/Root\/Destination_5ba5cc13-4a1d-4361-dbb0-699edb62bc44"}],"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":7,"Max":7,"Min":7},"Introspections":["1 tasks same as previous stage"],"EgressBytes":{"Count":1,"Sum":64,"Max":64,"Min":64},"DurationUs":{"Count":1,"Sum":2000,"Max":2000,"Min":2000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[2,1048576]},"BaseTimeMs":1761323283648,"CpuTimeUs":{"Count":1,"Sum":163,"Max":163,"Min":163,"History":[2,163]},"EgressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"StageDurationUs":2000,"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":7,"Max":7,"Min":7,"History":[2,7]}},"Name":"2","Push":{"WaitTimeUs":{"Count":1,"Sum":295,"Max":295,"Min":295,"History":[2,295]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":7,"Max":7,"Min":7,"History":[2,7]}}}],"UpdateTimeMs":2,"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":2337,"CpuTimeUs":1631},"ProcessCpuTimeUs":394,"TotalDurationUs":65389,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","PlanNodeType":"Query"}} 2025-10-24T16:28:03.661641Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) query_phases { duration_us: 2718 table_access { name: "/Root/.tmp/sessions/27795b42-4f33-c4f0-6a83-cc9a9ed98d83/Root/Destination_5ba5cc13-4a1d-4361-dbb0-699edb62bc44" updates { rows: 2 bytes: 24 } partitions_count: 1 } table_access { name: "/Root/Source" reads { rows: 2 bytes: 24 } partitions_count: 1 } cpu_time_us: 1440 affected_shards: 1 } compilation { duration_us: 2337 cpu_time_us: 1631 } process_cpu_time_us: 394 query_plan: 
"{\"Plan\":{\"Plans\":[{\"Tables\":[\"Destination\"],\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Source\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Source\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Source\",\"ReadColumns\":[\"Col1 (-\342\210\236, +\342\210\236)\",\"Col2\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[0,7]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":337,\"Max\":337,\"Min\":337,\"History\":[0,337]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[0,1048576]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Source\",\"ReadRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ReadBytes\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24}}],\"BaseTimeMs\":1761323283648,\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"CpuTimeUs\":{\"Count\":1,\"Sum\":225,\"Max\":225,\"Min\":225,\"History\":[0,225]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[0,64]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":347,\"Max\":347,\"Min\":347,\"History\":[0,347]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[0,64]}}}]}}],\"Node Type\":\"Map\",\"PlanNodeType\":\"Connection\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"Egress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[2,28]}},\"Name\":\"KqpTableSink\",\"Egress\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Splits\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":2000,\"Max\":2000,\"Min\":2000}},\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":252,\"Max\":252,\"Min\":252,\"History\":[2,252]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[2,28]}}}],\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/.tmp\\/sessions\\/27795b42-4f33-c4f0-6a83-cc9a9ed98d83\\/Root\\/Destination_5ba5cc13-4a1d-4361-dbb0-699edb62bc44\"}],\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"Introspections\":[\"1 tasks same as previous stage\"],\"EgressBytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"DurationUs\":{\"Count\":1,\"Sum\":2000,\"Max\":2000,\"Min\":2000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576]},\"BaseTimeMs\":1761323283648,\"CpuTimeUs\":{\"Count\":1,\"Sum\":163,\"Max\":163,\"Min\":163,\"History\":[2,163]},\"EgressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"StageDurationUs\":2000,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[2,7]}},\"Name\":\"2\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":295,\"Max\":295,\"Min\":295,\"History\":[2,295]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[2,7]}}}],\"UpdateTimeMs\":2,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/Destination\",\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":2337,\"CpuTimeUs\":1631},\"ProcessCpuTimeUs\":394,\"TotalDurationUs\":65389,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"FillTable\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Source\" \'\"72057594046644480:6\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Col1\" \'\"Col2\") \'() (Void) \'()))\n(let $3 \'(\'(\'\"_logical_id\" \'474) \'(\'\"_id\" \'\"a62d9bd3-7b3402ea-b6b80ad8-1dbe049a\") \'(\'\"_partition_mode\" \'\"single\") \'(\'\"_wide_channels\" (StructType \'(\'\"Col1\" (DataType \'Uint64)) \'(\'\"Col2\" (OptionalType (DataType \'Int32)))))))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda 
\'($10) (block \'(\n (let $11 (lambda \'($12) (Member $12 \'\"Col1\") (Member $12 \'\"Col2\")))\n (return (FromFlow (ExpandMap (ToFlow $10) $11)))\n))) $3))\n(let $5 (DqCnMap (TDqOutput $4 \'\"0\")))\n(let $6 \'\"/Root/.tmp/sessions/27795b42-4f33-c4f0-6a83-cc9a9ed98d83/Root/Destination_5ba5cc13-4a1d-4361-dbb0-699edb62bc44\")\n(let $7 (KqpTable $6 \'\"\" \'\"\" \'\"\"))\n(let $8 (KqpTableSinkSettings $7 \'\"true\" \'\"fill_table\" \'\"0\" \'\"true\" \'\"false\" \'\"false\" \'(\'(\'\"OriginalPath\" \'\"/Root/Destination\"))))\n(let $9 (DqPhyStage \'($5) (lambda \'($13) (FromFlow (NarrowMap (ToFlow $13) (lambda \'($14 $15) (AsStruct \'(\'\"Col1\" $14) \'(\'\"Col2\" $15)))))) \'(\'(\'\"_logical_id\" \'539) \'(\'\"_id\" \'\"5c7b1830-ce9b0404-2aef851a-82e40e1a\")) \'((DqSink \'\"0\" (DataSink \'\"KqpTableSink\" \'\"db\") $8))))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($4 $9) \'() \'() \'(\'(\'\"type\" \'\"generic\") \'(\'\"with_effects\")))) \'() \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 65389 total_cpu_time_us: 3465 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/.tmp/sessions/27795b42-4f33-c4f0-6a83-cc9a9ed98d83/Root/Destination_5ba5cc13-4a1d-4361-dbb0-699edb62bc44\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":11},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Col1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":true,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Col2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Col1\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1761323283\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"61eb523-ad5e6fa-5119d134-2241b7ef\",\"version\":\"1.0\"}" 2025-10-24T16:28:03.664089Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2025-10-24T16:27:22.324137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:22.351822Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:22.351931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:22.351970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002a29/r3tmp/tmpl4Ajzi/pdisk_1.dat 2025-10-24T16:27:22.450411Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:22.485320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:22.485366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:22.528636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8219, node 1 2025-10-24T16:27:22.633275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:22.633297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:22.633301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:22.633461Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:22.637224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:22.668642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5098 2025-10-24T16:27:23.032975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:23.915005Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:23.917814Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:23.918664Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:27:23.939916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:23.939957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:23.966353Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:23.972346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:24.116548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:24.116606Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:24.128968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:24.193037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:24.214702Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.214932Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215097Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215216Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215234Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215257Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215311Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215334Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.215355Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:24.282257Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:24.287609Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:24.287637Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:24.293374Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:24.293793Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:24.293826Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:24.293833Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:24.293840Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:24.293846Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded 
ForceTraversalOperations: table count# 0 2025-10-24T16:27:24.293852Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:24.293860Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:24.294158Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:24.294361Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1635:2455] 2025-10-24T16:27:24.294669Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:24.297214Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:24.297232Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:24.297246Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:24.302299Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:1849:2583] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:24.303130Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:24.304583Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:24.304624Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1885:2606], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:24.306355Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2614] 2025-10-24T16:27:24.306415Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1900:2614], schemeshard id = 72075186224037897 2025-10-24T16:27:24.366169Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:24.387717Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:24.523989Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:24.525800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:24.528414Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:24.528447Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:24.613713Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-10-24T16:27:24.837107Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-10-24T16:27:25.345286Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:25.395873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2228:3062], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:25.395926Z no ... 2790Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [2:3988:3726], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-10-24T16:27:56.332798Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-10-24T16:27:56.332834Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:3987:3725], server id = [2:3988:3726], tablet id = 72075186224037894, status = OK 2025-10-24T16:27:56.332849Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-10-24T16:27:56.332863Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [2:3985:3723], StatRequests.size() = 1 2025-10-24T16:27:56.332896Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-10-24T16:27:56.351320Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3975:3713], ActorId: [2:3976:3714], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODk3ZDM0ZmMtOWQ5OGM1NzItZmNiMmFjZjUtNGE0YTNlM2I=, TxId: 2025-10-24T16:27:56.351341Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3975:3713], ActorId: [2:3976:3714], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODk3ZDM0ZmMtOWQ5OGM1NzItZmNiMmFjZjUtNGE0YTNlM2I=, TxId: 2025-10-24T16:27:56.351423Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3974:3712], ActorId: [2:3975:3713], Got response [2:3976:3714] SUCCESS 2025-10-24T16:27:56.351470Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:27:56.362859Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-10-24T16:27:56.362881Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-10-24T16:27:56.467948Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-10-24T16:27:56.467976Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-10-24T16:27:56.531303Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3987:3725], schemeshard count = 1 2025-10-24T16:27:57.413284Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:27:57.413319Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:57.414269Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:57.439077Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:57.439249Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:57.439259Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:51: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-10-24T16:27:57.467794Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:27:58.469724Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:27:58.469762Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:707: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-10-24T16:27:58.469768Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:716: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-10-24T16:27:58.469777Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-10-24T16:27:58.469785Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:27:58.470067Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:27:58.481790Z node 2 :STATISTICS DEBUG: tx_analyze_table_request.cpp:56: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-10-24T16:27:58.481829Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:27:58.481996Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:27:58.482017Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR cookie 0 ... waiting for TEvAnalyzeTableResponse (done) 2025-10-24T16:27:58.482379Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-10-24T16:27:58.492908Z node 2 :STATISTICS ERROR: tx_analyze_deadline.cpp:28: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-10-24T16:27:58.919727Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:27:58.919803Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 1 2025-10-24T16:27:58.919829Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-10-24T16:27:58.940747Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:27:58.940774Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:27:58.940855Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 51, entries count: 2, are all stats full: 1 2025-10-24T16:27:59.747650Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-10-24T16:27:59.747683Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8321: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-10-24T16:27:59.747688Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8352: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-10-24T16:27:59.747693Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-10-24T16:28:03.441270Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:03.441325Z node 2 :STATISTICS DEBUG: tx_analyze_deadline.cpp:46: [72075186224037894] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:3111:3334] 2025-10-24T16:28:03.441345Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:28:03.441398Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-10-24T16:28:03.441592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:4195:3814], server id = [2:4196:3815], tablet id = 72075186224037899, status = OK 2025-10-24T16:28:03.441625Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:4195:3814], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:28:03.442405Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:28:03.442421Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:03.442455Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:4195:3814], server id = [2:4196:3815], tablet id = 72075186224037899 2025-10-24T16:28:03.442458Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:03.442476Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:03.442495Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:03.442555Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4200:3818], ActorId: [2:4201:3819], Starting query actor #1 [2:4202:3820] 2025-10-24T16:28:03.442564Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4201:3819], ActorId: [2:4202:3820], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:28:03.443126Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4201:3819], ActorId: [2:4202:3820], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NDJmZGRiYWItYjJmYjIyMTAtNzBlZGRlMGMtYzIzMzQyZmQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:03.449390Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4218:3830]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-10-24T16:28:03.449440Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-10-24T16:28:03.449446Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:4218:3830], StatRequests.size() = 1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=37;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=38;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=67217536-b0f611f0-a9c342a9-8b1c57bc; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=6336;delta=60960; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=6336;delta=60960; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=0;delta=6336; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=6336; >> KqpExplain::MultiJoinCteLinks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 10492, MsgBus: 4735 2025-10-24T16:27:59.472435Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825881816197683:2139];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:59.472486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002077/r3tmp/tmpnOKiF5/pdisk_1.dat 2025-10-24T16:27:59.519963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:59.533054Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 
2025-10-24T16:27:59.543939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825881816197582:2081] 1761323279471300 != 1761323279471303 TServer::EnableGrpc on GrpcPort 10492, node 1 2025-10-24T16:27:59.555016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:59.555028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.555030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.555072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4735 2025-10-24T16:27:59.583900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.583940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.584915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.610884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.614283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:27:59.617996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:59.645102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:27:59.673110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.688328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.774223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:59.884085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881816199229:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.884110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.884228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881816199239:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.884241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.943353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.955691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.970412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.985403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.998617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.012391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.025924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.041434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.057004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825886111167399:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.057035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.057085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825886111167404:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.057092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825886111167405:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.057100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... d 2025-10-24T16:28:03.739338Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:03.740642Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61710, node 4 2025-10-24T16:28:03.751080Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:03.751096Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:03.751098Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:03.751155Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16399 TClient is connected to server localhost:16399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:03.812805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:03.818076Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:03.829398Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:03.855324Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:03.874995Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:03.899127Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:04.127540Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903108751761:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.127566Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.127643Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903108751770:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.127656Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.142541Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.156366Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.170905Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.184103Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.198770Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.212481Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.226193Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.241001Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.264611Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825903108752633:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.264645Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.264681Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903108752638:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.264688Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903108752639:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.264696Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.265656Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:04.268477Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825903108752642:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:04.350561Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825903108752694:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:04.710671Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> KqpParams::MissingOptionalParameter-UseSink [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> KqpParams::ParameterTypes >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> KqpStats::DeferredEffects-UseSink [GOOD] >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> KqpExplain::UpdateConditional+UseSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> KqpStats::DataQueryWithEffects-UseSink >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] >> TKesusTest::TestSessionDetach >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] >> TKesusTest::TestAttachNewSessions >> KqpStats::JoinStatsBasicScan [GOOD] >> KqpQuery::QueryExplain [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink [GOOD] >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> TKesusTest::TestReleaseLockFailure >> TKesusTest::TestKesusConfig >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore >> KqpQuery::QueryFromSqs >> TKesusTest::TestSessionDetach [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink >> TKesusTest::TestAttachNewSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestSessionDetachFutureId >> TKesusTest::TestAttachMissingSession >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestKesusConfig [GOOD] >> KqpExplain::UpdateConditional+UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestLockNotFound >> KqpParams::ParameterTypes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 22802, MsgBus: 29743 2025-10-24T16:28:00.955021Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825885760096728:2244];send_to=[0:7307199536658146131:7762515]; 
2025-10-24T16:28:00.955060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:00.957164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002056/r3tmp/tmpkOXkPe/pdisk_1.dat 2025-10-24T16:28:00.980172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:00.980201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:00.981069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:00.983123Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:00.983299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825885760096509:2081] 1761323280952526 != 1761323280952529 TServer::EnableGrpc on GrpcPort 22802, node 1 2025-10-24T16:28:01.001425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:01.001444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:01.001446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:01.001486Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29743 TClient is connected to server localhost:29743 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:28:01.078731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:01.090309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.096180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:01.382118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825890055064457:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.382153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.382236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825890055064489:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.382259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.382290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825890055064492:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.383058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.390681Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825890055064494:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:01.444838Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825890055064545:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:01.489858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) Trying to start YDB, gRPC: 31743, MsgBus: 28841 2025-10-24T16:28:01.760146Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564825890956130555:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:01.760172Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002056/r3tmp/tmppUviah/pdisk_1.dat 2025-10-24T16:28:01.764102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:01.775469Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:01.775743Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564825890956130531:2081] 1761323281759970 != 1761323281759973 2025-10-24T16:28:01.779707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:01.779730Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:01.781003Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31743, node 2 2025-10-24T16:28:01.787653Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:01.787665Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:01.787668Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:01.787730Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28841 TClient is connected to server localhost:28841 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:01.848473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.858254Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.869997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first ... profiles were not loaded 2025-10-24T16:28:04.147452Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:04.147479Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:04.148393Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16696, node 4 2025-10-24T16:28:04.153666Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:04.153679Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:04.153681Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:04.153732Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3173 TClient is connected to server localhost:3173 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:04.225881Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:04.236076Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.252621Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.273596Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.286421Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:04.372358Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:04.538064Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903360323075:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.538106Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.538254Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903360323085:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.538264Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.550402Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.558778Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.568961Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.586246Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.596721Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.611232Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.625384Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.639711Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.656823Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825903360323948:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.656875Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903360323953:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.656875Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.656944Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825903360323955:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.656961Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.657778Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:04.666211Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825903360323956:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:04.762750Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825903360324009:3554] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32051, MsgBus: 27973 2025-10-24T16:27:56.377444Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825870744775906:2072];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:56.377743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00209d/r3tmp/tmpI7zhOR/pdisk_1.dat 2025-10-24T16:27:56.425545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:56.425738Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:56.426013Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825870744775865:2081] 1761323276377077 != 1761323276377080 TServer::EnableGrpc on GrpcPort 32051, node 1 2025-10-24T16:27:56.433918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:56.433928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:56.433929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:56.433965Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27973 TClient is connected to server localhost:27973 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:27:56.480760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:56.480791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:56.481894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:56.486800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:56.499593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.518456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.542883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.564692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:56.645291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:56.753391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825870744777512:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.753422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.753507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825870744777522:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.753518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.804388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.815271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.825948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.833206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.840024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.854250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.861035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.876030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.891415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825870744778385:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.891435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825870744778390:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.891440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.891518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825870744778393:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.891532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.892153Z node 1 :FLAT_TX_SCHEMESHARD ... fe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:02.647770Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:02.743964Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:02.941214Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825895237692983:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.941238Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.941332Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825895237692993:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.941345Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.953051Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.962722Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.972437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.987091Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.001244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.014981Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.029312Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.043120Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.060425Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825899532661153:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.060452Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.060627Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825899532661159:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.060627Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825899532661158:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.060643Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:03.061429Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:03.071463Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825899532661162:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:03.156667Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825899532661214:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:03.424927Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.435313Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.449829Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:03.502096Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; {"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_2_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_2_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"Filter","Name":"Iterator"},{"E-Rows":"2","Inputs":[],"Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"ConstantExpr-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_2_0","Node Type":"Precompute_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/SecondaryKeys","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"SecondaryKeys","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Key","Value"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"Filter"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base_reboots/unittest |61.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |61.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-10-24T16:28:04.085857Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:04.085911Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:04.091168Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:04.091238Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:04.113717Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:04.115180Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:138:2162], cookie=13871971723500220249, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-10-24T16:28:04.115263Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:04.126761Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:138:2162], cookie=13871971723500220249) 2025-10-24T16:28:04.127009Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=17873411392737278877, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-10-24T16:28:04.127082Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-10-24T16:28:04.138170Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete 
(sender=[1:147:2169], cookie=17873411392737278877) 2025-10-24T16:28:04.138439Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:152:2174], cookie=2117214458436084555, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:04.138515Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-10-24T16:28:04.149424Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:152:2174], cookie=2117214458436084555) 2025-10-24T16:28:04.149642Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:157:2179], cookie=15646256542329690553, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:04.149708Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-10-24T16:28:04.162013Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:157:2179], cookie=15646256542329690553) 2025-10-24T16:28:04.162261Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:162:2184], cookie=6971801490155730510, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:04.162331Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-10-24T16:28:04.174731Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:162:2184], cookie=6971801490155730510) 2025-10-24T16:28:04.175026Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:167:2189], cookie=11609282943114464229, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:04.175103Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-10-24T16:28:04.187474Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:167:2189], cookie=11609282943114464229) 2025-10-24T16:28:04.187727Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:172:2194], cookie=11728704754517545284, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-10-24T16:28:04.187796Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2025-10-24T16:28:04.198929Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:172:2194], cookie=11728704754517545284) 2025-10-24T16:28:04.199144Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:177:2199], cookie=13453892267475899778, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:04.199213Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-10-24T16:28:04.212340Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:177:2199], 
cookie=13453892267475899778) 2025-10-24T16:28:04.212548Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:182:2204], cookie=7694974906625807427, ids=[100], paths=[], recursive=0) 2025-10-24T16:28:04.212571Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2204], cookie=7694974906625807427) 2025-10-24T16:28:04.212633Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2207], cookie=7612606290230895435, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-10-24T16:28:04.212647Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2207], cookie=7612606290230895435) 2025-10-24T16:28:04.212690Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2210], cookie=3825028544128326602, ids=[], paths=[/Root, ], recursive=0) 2025-10-24T16:28:04.212699Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2210], cookie=3825028544128326602) 2025-10-24T16:28:04.212747Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2213], cookie=15272344345102855920, ids=[1, 1], paths=[], recursive=0) 2025-10-24T16:28:04.212751Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2213], cookie=15272344345102855920) 2025-10-24T16:28:04.212816Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2216], cookie=17321952913889238237, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-10-24T16:28:04.212821Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2216], cookie=17321952913889238237) 2025-10-24T16:28:04.212855Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2219], cookie=11030082484072250792, ids=[], paths=[], recursive=1) 2025-10-24T16:28:04.212864Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2219], cookie=11030082484072250792) 2025-10-24T16:28:04.212921Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2222], cookie=10170304566222005232, ids=[], paths=[], recursive=0) 2025-10-24T16:28:04.212928Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2222], cookie=10170304566222005232) 2025-10-24T16:28:04.212986Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2225], cookie=9015566234620197988, ids=[3, 2], paths=[], recursive=1) 2025-10-24T16:28:04.212995Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2225], cookie=9015566234620197988) 2025-10-24T16:28:04.213033Z node 1 :KESUS_TABLET 
DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:206:2228], cookie=10721390278646552302, ids=[3, 2], paths=[], recursive=0) 2025-10-24T16:28:04.213037Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:206:2228], cookie=10721390278646552302) 2025-10-24T16:28:04.213096Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:209:2231], cookie=12492071883292250923, ids=[], paths=[Root2/], recursive=1) 2025-10-24T16:28:04.213103Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:209:2231], cookie=12492071883292250923) 2025-10-24T16:28:04.213149Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:212:2234], cookie=14843462715087840477, ids=[], paths=[Root2/], recursive=0) 2025-10-24T16:28:04.213156Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:212:2234], cookie=14843462715087840477) 2025-10-24T16:28:04.219790Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:04.219835Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:04.219924Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:04.220120Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:04.264885Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:04.265038Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:251:2264], cookie=8744186387711453798, ids=[100], paths=[], recursive=0) 2025-10-24T16:28:04.265066Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:251:2264], cookie=8744186387711453798) 2025-10-24T16:28:04.265240Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:257:2269], cookie=10518723130441951917, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-10-24T16:28:04.265256Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:257:2269], cookie=10518723130441951917) 2025-10-24T16:28:04.265330Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:260:2272], cookie=8519939987948693180, ids=[], paths=[/Root, ], recursive=0) 2025-10-24T16:28:04.265343Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:260:2272], cookie=8519939987948693180) 2025-10-24T16:28:04.2 ... 
S_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-10-24T16:28:05.530489Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:157:2179], cookie=10993474743531272145) 2025-10-24T16:28:05.530697Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:162:2184], cookie=4115082817604316496, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.530729Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:162:2184], cookie=4115082817604316496) 2025-10-24T16:28:05.530857Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:168:2190], cookie=11970873667376172553, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.530864Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:168:2190], cookie=11970873667376172553) 2025-10-24T16:28:05.530966Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:174:2196], cookie=12457271613043435526, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.530972Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:174:2196], cookie=12457271613043435526) 2025-10-24T16:28:05.531029Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:177:2199], cookie=5059979777223282207, id=0, path="/Root/Folder/NonexistingRes") 2025-10-24T16:28:05.531043Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:177:2199], cookie=5059979777223282207) 2025-10-24T16:28:05.531096Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:180:2202], cookie=10126527038653950078, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.531107Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:180:2202], cookie=10126527038653950078) 2025-10-24T16:28:05.531164Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:183:2205], cookie=2436363089027032297, id=100, path="") 2025-10-24T16:28:05.531174Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:183:2205], cookie=2436363089027032297) 2025-10-24T16:28:05.531242Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:186:2208], cookie=5148194620375359550, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.531249Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:186:2208], cookie=5148194620375359550) 2025-10-24T16:28:05.531306Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:189:2211], cookie=198146692369185567, id=3, path="") 2025-10-24T16:28:05.531313Z node 4 :KESUS_TABLET DEBUG: 
tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:189:2211], cookie=198146692369185567) 2025-10-24T16:28:05.531394Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:192:2214], cookie=3011465884129526825, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.531403Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2214], cookie=3011465884129526825) 2025-10-24T16:28:05.531461Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:195:2217], cookie=5614881424561586392, id=0, path="/Root/Folder/Q1") 2025-10-24T16:28:05.531507Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-10-24T16:28:05.544208Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:195:2217], cookie=5614881424561586392) 2025-10-24T16:28:05.544435Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:200:2222], cookie=6531172578433626098, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.544462Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:200:2222], cookie=6531172578433626098) 2025-10-24T16:28:05.557207Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:05.557253Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:05.557317Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:05.557520Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:05.610908Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:05.611064Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:239:2252], cookie=17520280055192297290, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.611093Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:239:2252], cookie=17520280055192297290) 2025-10-24T16:28:05.611260Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:245:2257], cookie=6974057599612674545, id=3, path="") 2025-10-24T16:28:05.611308Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-10-24T16:28:05.624065Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:245:2257], cookie=6974057599612674545) 2025-10-24T16:28:05.624293Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:250:2262], cookie=16114838527444363908, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.624322Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:250:2262], cookie=16114838527444363908) 
2025-10-24T16:28:05.627402Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:05.627438Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:05.627496Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:05.627708Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:05.671994Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:05.672162Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:289:2292], cookie=17980373315856055167, ids=[], paths=[], recursive=1) 2025-10-24T16:28:05.672190Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:289:2292], cookie=17980373315856055167) 2025-10-24T16:28:05.903091Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:05.903128Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:05.907520Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:05.907723Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:05.941605Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:05.941791Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=4929751362048977232, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:05.941853Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2025-10-24T16:28:05.954718Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=4929751362048977232) 2025-10-24T16:28:05.954937Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=6854234796921039159, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-10-24T16:28:05.954998Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2025-10-24T16:28:05.965931Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=6854234796921039159) 2025-10-24T16:28:05.966464Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 12448713462822029676. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:05.966484Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=12448713462822029676) 2025-10-24T16:28:05.966642Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 9483812450781513907. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 } 2025-10-24T16:28:05.966651Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=9483812450781513907) >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 12798, MsgBus: 19475 2025-10-24T16:27:59.084176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825881246813022:2145];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:59.084211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:27:59.087649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002085/r3tmp/tmp3juj6L/pdisk_1.dat 2025-10-24T16:27:59.121829Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:59.122010Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825881246812906:2081] 1761323279083209 != 1761323279083212 TServer::EnableGrpc on GrpcPort 12798, node 1 2025-10-24T16:27:59.135776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:59.135788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.135790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.135838Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19475 2025-10-24T16:27:59.186783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.186811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.187756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19475 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:27:59.226549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.238356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.243006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:27:59.248178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.272669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.294126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.311178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:59.536104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881246814553:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.536145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.536372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881246814563:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.536382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.591045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.598837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.613762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.626797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.641690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.656721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.672030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.685458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.715274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825881246815424:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.715315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.715334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881246815429:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.715348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825881246815431:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.715361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, st ... le, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.509971Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.526692Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.593651Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:04.732762Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825905871706998:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.732799Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.732935Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825905871707008:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.732951Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.747314Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.757222Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.768142Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.780133Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.792986Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.806223Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.821202Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.835231Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.854062Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825905871707870:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.854089Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825905871707875:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.854093Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.854174Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825905871707878:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.854194Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:04.854846Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:04.861776Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825905871707877:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:04.960413Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825905871707931:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"E-Size":"0","PlanNodeId":6,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":6}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":3,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":11,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin 
(MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 21655, MsgBus: 8033 2025-10-24T16:28:01.959062Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825890461859063:2139];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:01.959212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002047/r3tmp/tmpszPZ49/pdisk_1.dat 2025-10-24T16:28:02.018182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:02.022572Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:02.022784Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825890461858962:2081] 1761323281957494 != 1761323281957497 TServer::EnableGrpc on GrpcPort 21655, node 1 2025-10-24T16:28:02.033612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:02.033626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:02.033629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:02.033681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8033 2025-10-24T16:28:02.072141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:02.072171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:02.073243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8033 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:02.095025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:02.100501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:02.106933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:28:02.131420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.158860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:02.175320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:02.199422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:02.369927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825894756827928:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.369953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.370069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825894756827938:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.370077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.443928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.451627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.461680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.475346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.489584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.504538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.520125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.531622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:02.549520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825894756828802:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.549547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.549550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825894756828807:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.549630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825894756828809:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.549646Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... _current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.017765Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.018369Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.018388Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.018391Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.018472Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.018487Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.018489Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.019277Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.019288Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.019290Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.019650Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.019668Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.019671Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020042Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020056Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020057Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020737Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020753Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020754Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020794Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020809Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.020810Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.021745Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.021760Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.021762Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.021872Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.021887Z node 4 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.021889Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.022750Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.022769Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.022771Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.022877Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.022892Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.022894Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.024084Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.024102Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.024105Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.024145Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.024166Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.024168Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-10-24T16:28:06.029973Z node 4 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825915055634998:2725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.029995Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.030028Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825915055635000:2726], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.030032Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.033406Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825915055635009:3617] txid# 281474976715660, issues: { message: "Type \'Datetime64\' specified for column \'Datetime\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } 2025-10-24T16:28:06.198239Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2025-10-24T16:27:43.095873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.131826Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:43.131915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:43.131923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002995/r3tmp/tmpPvmLOf/pdisk_1.dat 2025-10-24T16:27:43.190487Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:43.191237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.191261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.191353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323262794655 != 1761323262794658 2025-10-24T16:27:43.222354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.268572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:43.301118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:43.384484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:43.398408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:43.398582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:43.398625Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:43.398664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:43.399393Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:43.405585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:43.405618Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:43.405763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:43.405772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:43.405777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:43.405822Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:43.405839Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:43.405849Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:43.405901Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.409263Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:43.409320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.409338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:43.409342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:43.409346Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:43.409349Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:43.409395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.409402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.409484Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:43.409499Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:43.409514Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:43.409523Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:43.409529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:43.409533Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:43.409536Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:43.409541Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:43.409545Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:43.409554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.409558Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.409562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2563], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:43.409636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:43.409641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.409656Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:43.409704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:43.409713Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:43.409724Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:43.409731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.409734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:43.409738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:43.409741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.409829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.409833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:43.409836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:43.409838Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:43.409846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:43.409849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:43.409853Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:43.409856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:43.409859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.409977Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:43.409982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.409985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:43.409991Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-10-24T16:27:43.409998Z node 1 :TX_DATASHAR ... 63 DataShard: 72075186224037888 Generation: 2 Counter: 3 SchemeShard: 72057594046644480 PathId: 2 2025-10-24T16:28:05.467921Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:28:05.467925Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteWrite 2025-10-24T16:28:05.467930Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit FinishProposeWrite 2025-10-24T16:28:05.467934Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:28:05.467957Z node 14 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-10-24T16:28:05.467965Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-10-24T16:28:05.467969Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit FinishProposeWrite 2025-10-24T16:28:05.467973Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:28:05.467978Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:28:05.467993Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:28:05.467998Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:28:05.468003Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037888 has finished 2025-10-24T16:28:05.468021Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-10-24T16:28:05.468026Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:28:05.468033Z node 14 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2025-10-24T16:28:05.468043Z node 14 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-10-24T16:28:05.468060Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:05.468129Z node 14 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:911: SelfId: [14:1004:2746], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [14:954:2746]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[14:1004:2746].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-10-24T16:28:05.468150Z node 14 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [14:997:2746], SessionActorId: [14:954:2746], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[14:954:2746]. 2025-10-24T16:28:05.468237Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=14&id=OTYwYTZmNjItZGVkY2UzM2EtMmE0YjllZi1mY2VjM2RjYw==, ActorId: [14:954:2746], ActorState: ExecuteState, TraceId: 01k8bgryye265c43sddbabqzg5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [14:998:2746] from: [14:997:2746] 2025-10-24T16:28:05.468273Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 278003712, Sender [14:997:2746], Recipient [14:880:2696]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 3 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-10-24T16:28:05.468278Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-10-24T16:28:05.468309Z node 14 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [14:998:2746] TxId: 281474976715665. Ctx: { TraceId: 01k8bgryye265c43sddbabqzg5, Database: , SessionId: ydb://session/3?node_id=14&id=OTYwYTZmNjItZGVkY2UzM2EtMmE0YjllZi1mY2VjM2RjYw==, PoolId: default, DatabaseId: /Root}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-10-24T16:28:05.468370Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435074, Sender [14:880:2696], Recipient [14:880:2696]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-10-24T16:28:05.468376Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3184: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-10-24T16:28:05.468386Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-10-24T16:28:05.468402Z node 14 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 3 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-10-24T16:28:05.468413Z node 14 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-10-24T16:28:05.468423Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-10-24T16:28:05.468430Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:28:05.468434Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-10-24T16:28:05.468439Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:05.468444Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:28:05.468451Z node 14 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2025-10-24T16:28:05.468459Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-10-24T16:28:05.468466Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:28:05.468470Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:05.468474Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2025-10-24T16:28:05.468478Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-10-24T16:28:05.468485Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:28:05.468489Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-10-24T16:28:05.468493Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-10-24T16:28:05.468498Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 
on unit ExecuteWrite 2025-10-24T16:28:05.468503Z node 14 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:6] at 72075186224037888 2025-10-24T16:28:05.468514Z node 14 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 3 SchemeShard: 72057594046644480 PathId: 2 2025-10-24T16:28:05.468520Z node 14 :TX_DATASHARD DEBUG: execute_write_unit.cpp:461: Skip empty write operation for [0:6] at 72075186224037888 2025-10-24T16:28:05.468529Z node 14 :TX_DATASHARD TRACE: execute_write_unit.cpp:47: add locks to result: 0 2025-10-24T16:28:05.468542Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:28:05.468546Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-10-24T16:28:05.468550Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-10-24T16:28:05.468555Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:28:05.468561Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-10-24T16:28:05.468565Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-10-24T16:28:05.468570Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:28:05.468574Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:28:05.468581Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-10-24T16:28:05.468586Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:28:05.468590Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:6] at 72075186224037888 has finished 2025-10-24T16:28:05.468599Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-10-24T16:28:05.468603Z node 14 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-10-24T16:28:05.468608Z node 14 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-10-24T16:28:05.468619Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:05.468654Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=14&id=OTYwYTZmNjItZGVkY2UzM2EtMmE0YjllZi1mY2VjM2RjYw==, ActorId: [14:954:2746], ActorState: ExecuteState, TraceId: 01k8bgryye265c43sddbabqzg5, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 5542, MsgBus: 23735 
2025-10-24T16:28:00.923610Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825888294270551:2157];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.923726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00205d/r3tmp/tmpSfnwoB/pdisk_1.dat 2025-10-24T16:28:00.966986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:00.977485Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:00.977945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825888294270421:2081] 1761323280922725 != 1761323280922728 TServer::EnableGrpc on GrpcPort 5542, node 1 2025-10-24T16:28:00.987217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:00.987227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:00.987229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:00.987269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23735 TClient is connected to server localhost:23735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:28:01.026561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:01.026594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:01.027476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:01.036734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.048859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.069422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.093276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.105837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.188866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:01.333046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892589239364:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.333073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.333191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892589239374:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.333200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.390016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.397757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.415072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.427670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.439391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.453930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.471053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.482583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.510126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825892589240238:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.510159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.510180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892589240243:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.510226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892589240245:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.510252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.511189Z node 1 :FLAT_TX_SCHEMESHARD W ... o.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6863, node 4 2025-10-24T16:28:04.844760Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:04.844774Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:04.844779Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:04.844844Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62285 TClient is connected to server localhost:62285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:04.904926Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:04.915723Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.927631Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:04.948039Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:04.960408Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:05.076477Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:05.191991Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825907057259748:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.192026Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.192140Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825907057259758:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.192151Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.201759Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.210025Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.219152Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.226515Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.241131Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.254763Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.268822Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.283329Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:05.299062Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825907057260620:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.299081Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825907057260625:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.299087Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.299128Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825907057260628:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.299135Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:05.299651Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:05.302153Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825907057260627:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:05.363181Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825907057260681:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:06.335765Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:06.395448Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1761323285834, txId: 281474976710673] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14849, MsgBus: 12531 2025-10-24T16:28:00.999135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825887749104640:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.999150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002057/r3tmp/tmpzjVK3w/pdisk_1.dat 2025-10-24T16:28:01.041041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:01.054049Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14849, node 1 2025-10-24T16:28:01.067762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:01.067789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:01.067792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:01.067861Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12531 TClient is connected to server localhost:12531 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:28:01.104171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:01.104205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-10-24T16:28:01.105829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:01.117859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.127843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:01.136615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.158824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.180828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:01.192811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:01.240203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:01.472473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892044073534:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.472510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.472643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892044073544:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.472654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.531755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.539957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.551826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.566004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.580181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.593853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.607766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.622412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.637790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825892044074409:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.637811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.637843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892044074414:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.637868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892044074415:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.637898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.638520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemes ... ecting 2025-10-24T16:28:05.860418Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:05.863603Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:05.863791Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:7564825907163127556:2081] 1761323285833010 != 1761323285833013 TServer::EnableGrpc on GrpcPort 26064, node 4 2025-10-24T16:28:05.870340Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:05.870356Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:05.870359Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:05.870419Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24406 2025-10-24T16:28:05.927244Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:05.934327Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:05.943641Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:05.965910Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:05.988513Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.001034Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.300354Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825911458096496:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.300386Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.300491Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825911458096506:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.300503Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.319473Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.329851Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.339727Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.353756Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.369804Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.385965Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.399127Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.410273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.427741Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825911458097368:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.427777Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825911458097373:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.427777Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.427822Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825911458097375:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.427831Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.428918Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:06.437122Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825911458097376:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:06.507416Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825911458097429:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpParams::ParameterTypes [GOOD] Test command err: Trying to start YDB, gRPC: 20630, MsgBus: 24495 2025-10-24T16:28:00.511233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825885858655107:2143];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.511320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002065/r3tmp/tmpr4YrkR/pdisk_1.dat 2025-10-24T16:28:00.571525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:00.572082Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:00.575989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825885858655002:2081] 1761323280509747 != 1761323280509750 TServer::EnableGrpc on GrpcPort 20630, node 1 2025-10-24T16:28:00.593878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:00.593895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:00.593897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:00.593949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24495 2025-10-24T16:28:00.624072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:00.624103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:00.624741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24495 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:00.654197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:00.666196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:00.675018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.698597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.719397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.731522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:00.828336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:00.972106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885858656649:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.972151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.972362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885858656659:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.972386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.015513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.026374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.039622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.048301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.061689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.076883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.092767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.105214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.126115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825890153624817:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.126143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.126190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825890153624822:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.126199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825890153624823:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.126219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... e) 2025-10-24T16:28:05.628812Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:05.628862Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19887 TClient is connected to server localhost:19887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:05.694444Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:05.696335Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:05.698667Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:05.698698Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:05.699780Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:05.703929Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:05.719252Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:05.742080Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:05.755237Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:05.839657Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:06.027356Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825915196084986:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.027418Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.029848Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825915196085068:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.029878Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.030838Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.040714Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.052435Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.067114Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.081155Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.095408Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.109604Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.123364Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.140861Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825915196085859:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.140904Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825915196085864:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.140902Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.140988Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825915196085866:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.141007Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.141945Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:06.150787Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825915196085867:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:06.217077Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825915196085920:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:06.597010Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9618, MsgBus: 28656 2025-10-24T16:28:00.023032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825885677041162:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.023056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00206c/r3tmp/tmpy3zApR/pdisk_1.dat 2025-10-24T16:28:00.065842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 9618, node 1 2025-10-24T16:28:00.076519Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:00.082369Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825885677040922:2081] 1761323280018661 != 1761323280018664 2025-10-24T16:28:00.086592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:00.086607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:00.086610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:00.086660Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28656 2025-10-24T16:28:00.125583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:00.125618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:00.126483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28656 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:00.148905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:00.155368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.176399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.196022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.207962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:00.283248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:00.399938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885677042568:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.400001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.400206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885677042578:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.400222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.459421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.466828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.473338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.488997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.508180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.519628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.531808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.553605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.575642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825885677043439:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.575675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.575776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885677043444:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.575781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885677043445:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.575842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.576746Z node 1 :FLAT_TX_SCHEMESHARD W ... PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:05.967135Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:05.968348Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:05.976366Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:05.987244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.007101Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.019520Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.140341Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:06.273842Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825912292306893:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.273870Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.274024Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825912292306903:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.274034Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.286533Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.304116Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.312788Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.326000Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.340156Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.353801Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.369452Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.382504Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:06.398506Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825912292307766:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.398537Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.398578Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825912292307771:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.398584Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.398594Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825912292307772:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:06.399304Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:06.408946Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825912292307775:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:06.474273Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825912292307827:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":1}],"Predicate":"item.Data \u003E 0","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"EightShard","ReadColumns":["Key (-∞, +∞)","Data"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Upsert"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> KqpQuery::QueryFromSqs [GOOD] >> DataShardTxOrder::ZigZag [GOOD] >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink |61.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |61.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite >> KqpFlipJoin::LeftSemi_2 >> KqpJoinOrder::SortingsWithLookupJoin2-RemoveLimitOperator >> KqpJoinOrder::TPCDS88+ColumnStore >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] >> KqpJoinOrder::TPCDS94+ColumnStore >> KqpJoinOrder::SortingsByPK+RemoveLimitOperator >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-10-24T16:28:06.643433Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.643461Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.646777Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.646823Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.668464Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.668623Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=9877184937969211024, session=0, seqNo=0) 2025-10-24T16:28:06.668671Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:06.679448Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=9877184937969211024, session=1) 2025-10-24T16:28:06.679747Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:138:2162], cookie=9002574629675556134, session=2) 2025-10-24T16:28:06.679767Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:138:2162], cookie=9002574629675556134) 2025-10-24T16:28:06.679838Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:138:2162], cookie=11213011151473787486 2025-10-24T16:28:06.679931Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=10784399653734336741, session=1, seqNo=0) 2025-10-24T16:28:06.690617Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=10784399653734336741, session=1) 2025-10-24T16:28:06.690710Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:06.690758Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:06.690774Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:06.690818Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:138:2162], cookie=15935402411132555704, session=1) 2025-10-24T16:28:06.700975Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-10-24T16:28:06.701005Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-10-24T16:28:06.701012Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-10-24T16:28:06.711840Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=111) 2025-10-24T16:28:06.711887Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] 
TTxSessionDetach::Complete (sender=[1:138:2162], cookie=15935402411132555704) 2025-10-24T16:28:06.711897Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-10-24T16:28:06.900260Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.900294Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.904850Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.904942Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.937169Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.937291Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:137:2162], cookie=17752415776760093347, path="") 2025-10-24T16:28:06.948477Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:137:2162], cookie=17752415776760093347, status=SUCCESS) 2025-10-24T16:28:06.948667Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:147:2169], cookie=111, session=0, seqNo=0) 2025-10-24T16:28:06.948706Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:06.948751Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:147:2169], cookie=9583093912116927904, session=1) 2025-10-24T16:28:06.958923Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-10-24T16:28:06.958955Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-10-24T16:28:06.969847Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:147:2169], cookie=111, session=1) 2025-10-24T16:28:06.969873Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:147:2169], cookie=9583093912116927904) 2025-10-24T16:28:06.969882Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-10-24T16:28:07.149335Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.149378Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.154175Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.154223Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.175875Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.176067Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=5219080097823227160, session=0, seqNo=0) 2025-10-24T16:28:07.176112Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.186984Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=5219080097823227160, session=1) 2025-10-24T16:28:07.187191Z node 3 
:KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:138:2162], cookie=2777182787954776406, session=1) 2025-10-24T16:28:07.187224Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-10-24T16:28:07.197954Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:138:2162], cookie=2777182787954776406) 2025-10-24T16:28:07.198178Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:155:2177], cookie=2789702352896073314) 2025-10-24T16:28:07.198205Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:155:2177], cookie=2789702352896073314) 2025-10-24T16:28:07.198299Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:158:2180], cookie=9760164348390107859, session=0, seqNo=0) 2025-10-24T16:28:07.198343Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:07.209071Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:158:2180], cookie=9760164348390107859, session=2) 2025-10-24T16:28:07.209350Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:138:2162], cookie=1728093051315480857, session=2) 2025-10-24T16:28:07.209389Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2025-10-24T16:28:07.220473Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:138:2162], cookie=1728093051315480857) 2025-10-24T16:28:07.401787Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.401827Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.406335Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.406491Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.427952Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.428186Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=12345, session=0, seqNo=0) 2025-10-24T16:28:07.428232Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.439167Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=12345, session=1) 2025-10-24T16:28:07.439360Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:145:2167], cookie=23456, session=1, seqNo=0) 2025-10-24T16:28:07.450200Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:145:2167], cookie=23456, session=1) 2025-10-24T16:28:07.654663Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.654700Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.659008Z node 5 :KESUS_TABLET DEBUG: 
tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.659214Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.691119Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.691409Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=12345, session=0, seqNo=0) 2025-10-24T16:28:07.691462Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.702480Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=12345, session=1) 2025-10-24T16:28:07.702687Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:145:2167], cookie=23456, session=1, seqNo=0) 2025-10-24T16:28:07.713577Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:145:2167], cookie=23456, session=1) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-10-24T16:28:06.690991Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.691019Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.695223Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.695262Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.716785Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.716969Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=5305013347388538793, session=0, seqNo=0) 2025-10-24T16:28:06.717019Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:06.727830Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=5305013347388538793, session=1) 2025-10-24T16:28:06.727948Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=14274046208207438921, session=0, seqNo=0) 2025-10-24T16:28:06.727988Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:06.738660Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=14274046208207438921, session=2) 2025-10-24T16:28:06.938262Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.938298Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.942719Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.942804Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.974730Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.974930Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: 
[72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=4421790448227758442, session=1, seqNo=0) 2025-10-24T16:28:06.985820Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=4421790448227758442, session=1) 2025-10-24T16:28:07.199498Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.199531Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.205252Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.205307Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.227880Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.228154Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=10467684679651662245, session=0, seqNo=0) 2025-10-24T16:28:07.228207Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.239028Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=10467684679651662245, session=1) 2025-10-24T16:28:07.450404Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.450446Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.454695Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.454889Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.476560Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.476719Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:138:2162], cookie=13032295225941147541, path="") 2025-10-24T16:28:07.487947Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:138:2162], cookie=13032295225941147541, status=SUCCESS) 2025-10-24T16:28:07.488193Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:147:2169], cookie=13135893753916260070, session=0, seqNo=0) 2025-10-24T16:28:07.488238Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.499181Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:147:2169], cookie=13135893753916260070, session=1) 2025-10-24T16:28:07.499410Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:148:2170], cookie=111, session=0, seqNo=0) 2025-10-24T16:28:07.499458Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:07.499502Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:148:2170], cookie=222, seqNo=0 2025-10-24T16:28:07.510453Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:148:2170], cookie=111, session=2) 2025-10-24T16:28:07.710926Z node 5 
:KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.710959Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.715115Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.715290Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.746996Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.747125Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:137:2162], cookie=18070651273063803104, path="") 2025-10-24T16:28:07.757957Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:137:2162], cookie=18070651273063803104, status=SUCCESS) 2025-10-24T16:28:07.758169Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=6930980700345073068, session=0, seqNo=0) 2025-10-24T16:28:07.758208Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.768977Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=6930980700345073068, session=1) 2025-10-24T16:28:07.769152Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:147:2169], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:07.769194Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:07.769206Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:07.769267Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:148:2170], cookie=111, session=0, seqNo=0) 2025-10-24T16:28:07.769277Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:07.769288Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:148:2170], cookie=222, session=1, seqNo=0) 2025-10-24T16:28:07.779935Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:147:2169], cookie=123) 2025-10-24T16:28:07.779960Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:148:2170], cookie=111, session=2) 2025-10-24T16:28:07.779967Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:148:2170], cookie=222, session=1) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-10-24T16:27:43.667982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:43.680489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:43.680513Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 
2025-10-24T16:27:43.680960Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:43.680996Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:43.681052Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:43.681761Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:43.690517Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:43.690568Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:43.690678Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:43.690686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:43.690694Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:43.690757Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:43.690767Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:43.690776Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:43.727322Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.730452Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:43.730516Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.730534Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:43.730539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:43.730542Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:43.730546Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:43.730593Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.730598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.730658Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:43.730676Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:43.730683Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:43.730689Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 
2025-10-24T16:27:43.730695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:43.730699Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:43.730704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:43.730708Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:43.730712Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:43.730720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.730724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.730728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:43.731068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:43.731075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.731084Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:43.731114Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:43.731124Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:43.731133Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:43.731139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.731142Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:43.731146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:43.731149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:43.731203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.731206Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:43.731208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:43.731211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] 
at 9437184 on unit FinishPropose 2025-10-24T16:27:43.731218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:43.731221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:43.731223Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:43.731226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:43.731231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.750608Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:43.750632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:43.750638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:43.750651Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:43.750665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:43.750752Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.750758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.750764Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:43.750779Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:43.750782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:43.750812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:43.750818Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:43.750822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:43.750825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:43.753587Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:43.753605Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:43.753655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 
2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.753659Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.753668Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:43.753674Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:43.753678Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:43.753684Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:43.753688Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... ions 2025-10-24T16:28:07.679729Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-10-24T16:28:07.679878Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:243:2235], Recipient [2:243:2235]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:07.679889Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:07.679903Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:28:07.679908Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:28:07.679913Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:28:07.679920Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-10-24T16:28:07.679924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-10-24T16:28:07.679931Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.679936Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-10-24T16:28:07.681107Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-10-24T16:28:07.681123Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-10-24T16:28:07.681313Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-10-24T16:28:07.681320Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681325Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-10-24T16:28:07.681329Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-10-24T16:28:07.681333Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 
2025-10-24T16:28:07.681338Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681342Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-10-24T16:28:07.681345Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:07.681349Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-10-24T16:28:07.681360Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2025-10-24T16:28:07.681364Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-10-24T16:28:07.681368Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2025-10-24T16:28:07.681375Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681379Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:07.681383Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-10-24T16:28:07.681387Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-10-24T16:28:07.681395Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681398Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-10-24T16:28:07.681402Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-10-24T16:28:07.681405Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-10-24T16:28:07.681410Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-10-24T16:28:07.681417Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-10-24T16:28:07.681421Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-10-24T16:28:07.681428Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681432Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-10-24T16:28:07.681436Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-10-24T16:28:07.681440Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 
2025-10-24T16:28:07.681444Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681447Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-10-24T16:28:07.681451Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2025-10-24T16:28:07.681455Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2025-10-24T16:28:07.681459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681462Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2025-10-24T16:28:07.681466Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-10-24T16:28:07.681469Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-10-24T16:28:07.681544Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:305: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-10-24T16:28:07.681552Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-10-24T16:28:07.681561Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681565Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-10-24T16:28:07.681569Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-10-24T16:28:07.681573Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-10-24T16:28:07.681636Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-10-24T16:28:07.681641Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-10-24T16:28:07.681644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-10-24T16:28:07.681648Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-10-24T16:28:07.681654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000016:45] at 9437184 is Executed 2025-10-24T16:28:07.681657Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-10-24T16:28:07.681661Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000016:45] at 9437184 has finished 2025-10-24T16:28:07.781221Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:07.781245Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:28:07.781250Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:28:07.781253Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:28:07.792469Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-10-24T16:28:07.792494Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-10-24T16:28:07.792508Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:28:07.792517Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-10-24T16:28:07.792538Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:28:07.792548Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:28:07.792938Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-10-24T16:28:07.792956Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-10-24T16:28:07.792968Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-10-24T16:28:07.792975Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-10-24T16:28:07.792991Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:28:07.792999Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 2541, MsgBus: 3556 2025-10-24T16:28:00.106514Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825886074270491:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.106532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:00.111061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002069/r3tmp/tmppymoqk/pdisk_1.dat 2025-10-24T16:28:00.141658Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:00.141923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7564825886074270453:2081] 1761323280106186 != 1761323280106189 TServer::EnableGrpc on GrpcPort 2541, node 1 2025-10-24T16:28:00.158887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:00.158901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:00.158903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:00.158960Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3556 TClient is connected to server localhost:3556 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:28:00.210539Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:00.210579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:00.210596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-10-24T16:28:00.211690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:00.219608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:00.226928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:00.250272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.269599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.280473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.496085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825886074272097:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.496109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.500311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825886074272107:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.500350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.552629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.563883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.575246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.594415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.606897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.621773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.634946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.649051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.670954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825886074272969:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.670975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.671057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825886074272974:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.671078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825886074272975:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.671090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.671955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operat ... p:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:06.615111Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:06.615114Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:06.615167Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12151 TClient is connected to server localhost:12151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:06.665506Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:06.671145Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:06.674754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.686711Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:06.706536Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:06.719014Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:07.034984Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825918246359198:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.035009Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.035050Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825918246359208:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.035055Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.046169Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.053330Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.067678Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.081984Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.095802Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.110162Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.124599Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.138415Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.155963Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825918246360071:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.155998Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.156002Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825918246360076:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.156044Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825918246360078:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.156055Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.156739Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:07.164789Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825918246360079:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:07.220355Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825918246360132:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:07.472232Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.596558Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore >> KqpJoinOrder::SortingsWithLookupJoin2+RemoveLimitOperator >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup >> KqpIndexLookupJoin::Inner+StreamLookup >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryCancelWrite >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> AutoConfig::GetASPoolsith1CPU [GOOD] >> KqpJoinOrder::UdfConstantFold-ColumnStore >> KqpFlipJoin::Right_3 >> OlapEstimationRowsCorrectness::TPCH10 >> KqpQuery::ExecuteWriteQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21948, MsgBus: 27931 2025-10-24T16:28:00.107546Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825885760068694:2080];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.107582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:00.111392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002066/r3tmp/tmp8lIhcy/pdisk_1.dat 2025-10-24T16:28:00.137873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:00.137904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:00.139058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:00.147312Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825885760068641:2081] 1761323280106869 != 1761323280106872 2025-10-24T16:28:00.153327Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21948, node 1 2025-10-24T16:28:00.158982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:00.158990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:00.158991Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:00.159025Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27931 TClient is connected to server localhost:27931 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:28:00.218692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:00.229972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:00.240736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.258824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.277699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
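[editor's note] The test startup output above and below repeats a small set of WARN/ERROR records many times per test (FLAT_TX_SCHEMESHARD "propose itself is undo unsafe", KQP_WORKLOAD_SERVICE "Resource pool default not found"). As a hedged triage sketch, assuming only the "timestamp node N :COMPONENT LEVEL: file:line: message" layout seen in this report, the following hypothetical snippet collapses such runs into per-component counts; it is not part of the YDB tooling.

    # Hypothetical log-triage sketch (not part of YDB): count records by component
    # and severity, based on the record layout visible in this report.
    import re
    from collections import Counter

    RECORD_RE = re.compile(
        r"\d{4}-\d{2}-\d{2}T[\d:.]+Z node \d+ "
        r":(?P<component>\w+) (?P<level>TRACE|DEBUG|INFO|WARN|ERROR):"
    )

    def summarize(log_text: str) -> Counter:
        """Count log records per (component, level) pair."""
        counts = Counter()
        for m in RECORD_RE.finditer(log_text):
            counts[(m.group("component"), m.group("level"))] += 1
        return counts

    # Example: summarize(report_text).most_common(3) might show that
    # FLAT_TX_SCHEMESHARD WARN and KQP_WORKLOAD_SERVICE WARN dominate a test's stderr.

[end editor's note; the test output resumes below]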
2025-10-24T16:28:00.288157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:00.548287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885760070282:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.548326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.548533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885760070292:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.548542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.620581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.633043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.641969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.657369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.672090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.686978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.698417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.711714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.732763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825885760071153:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.732782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885760071158:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.732790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.732838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825885760071161:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.732863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.733502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__o ... MESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.049929Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.061089Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.075214Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.088655Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.103039Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.116803Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.131722Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.147629Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [5:7564825917401868643:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.147649Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825917401868648:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.147655Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.147691Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825917401868651:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.147695Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.148307Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:07.157781Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825917401868650:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:07.223888Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825917401868704:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:07.537504Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.546200Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.558254Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.667802Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; {"Plan":{"Plans":[{"PlanNodeId":18,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":17,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_1_2","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_2"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":15,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Delete","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Upsert","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_1","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_1"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","ReadRangesPointPrefixLen":"1","E-Rows":"0","IndexSelectionInfo":"index:Index: 
(0,0,1,0,1,0,0)","ReadRangesKeys":["Fk"],"Table":"SecondaryKeys\/Index\/indexImplTable","ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Subplan Name":"CTE Stage_5","Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_1","Node Type":"Precompute_1_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_2","Node Type":"Precompute_1_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"}]},{"name":"\/Root\/SecondaryKeys\/Index\/indexImplTable","reads":[{"columns":["Fk","Key"],"scan_by":["Fk [1, 4)"],"type":"Scan"}],"writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":7,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Operators":[{"Name":"Delete","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":15,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Operators":[{"Name":"Upsert","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":22,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |61.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-10-24T16:28:06.758316Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.758355Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: 
[72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.761766Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.761816Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.783745Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.783971Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=10239207328091469229, session=0, seqNo=0) 2025-10-24T16:28:06.784021Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:06.794912Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=10239207328091469229, session=1) 2025-10-24T16:28:06.795033Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=8339262173872950842, session=0, seqNo=0) 2025-10-24T16:28:06.795075Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:06.806044Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=8339262173872950842, session=2) 2025-10-24T16:28:06.806174Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:138:2162], cookie=111, name="Lock1") 2025-10-24T16:28:06.817223Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:138:2162], cookie=111) 2025-10-24T16:28:06.817344Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:06.817401Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:06.817420Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:06.828562Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=222) 2025-10-24T16:28:06.828680Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:138:2162], cookie=333, name="Lock1") 2025-10-24T16:28:06.839632Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:138:2162], cookie=333) 2025-10-24T16:28:06.989904Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.989938Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.992931Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.993007Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.024815Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.025015Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], 
cookie=3190241560550554170, session=0, seqNo=0) 2025-10-24T16:28:07.025059Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.036015Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=3190241560550554170, session=1) 2025-10-24T16:28:07.036134Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=12935591327127858441, session=0, seqNo=0) 2025-10-24T16:28:07.036173Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:07.047223Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=12935591327127858441, session=2) 2025-10-24T16:28:07.047429Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:149:2171], cookie=16176151567155946275, name="Sem1", limit=1) 2025-10-24T16:28:07.047481Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:07.058338Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:149:2171], cookie=16176151567155946275) 2025-10-24T16:28:07.058446Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=111, session=1, semaphore="Sem1" count=1) 2025-10-24T16:28:07.058494Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-10-24T16:28:07.058540Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-10-24T16:28:07.069415Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=111) 2025-10-24T16:28:07.069453Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=222) 2025-10-24T16:28:07.069599Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2179], cookie=2949307297867168289, name="Sem1") 2025-10-24T16:28:07.069642Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2179], cookie=2949307297867168289) 2025-10-24T16:28:07.069707Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2182], cookie=11132065406852250927, name="Sem1") 2025-10-24T16:28:07.069714Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2182], cookie=11132065406852250927) 2025-10-24T16:28:07.069750Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:137:2162], cookie=333, name="Sem1") 2025-10-24T16:28:07.069783Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-10-24T16:28:07.080961Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete 
(sender=[2:137:2162], cookie=333) 2025-10-24T16:28:07.081131Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2187], cookie=17300355352022260327, name="Sem1") 2025-10-24T16:28:07.081158Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2187], cookie=17300355352022260327) 2025-10-24T16:28:07.081218Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2190], cookie=14709169787063880729, name="Sem1") 2025-10-24T16:28:07.081224Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2190], cookie=14709169787063880729) 2025-10-24T16:28:07.081256Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:137:2162], cookie=444, name="Sem1") 2025-10-24T16:28:07.081286Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-10-24T16:28:07.092122Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:137:2162], cookie=444) 2025-10-24T16:28:07.092255Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2195], cookie=5088200146697957499, name="Sem1") 2025-10-24T16:28:07.092270Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2195], cookie=5088200146697957499) 2025-10-24T16:28:07.092324Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:176:2198], cookie=3609459560070027624, name="Sem1") 2025-10-24T16:28:07.092328Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:176:2198], cookie=3609459560070027624) 2025-10-24T16:28:07.233249Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.233288Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.238599Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.238651Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.261534Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.261694Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:138:2162], cookie=17698384195643633462, name="Sem1", limit=1) 2025-10-24T16:28:07.261743Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:07.273299Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:138:2162], cookie=17698384195643633462) 2025-10-24T16:28:07.273477Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:147:2169], cookie=9026893891091815549, name="Sem2", limit=1) 2025-10-24T16:28:07.273517Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2025-10-24T16:28:07.284295Z 
node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:147:2169], cookie=9026893891091815549) 2025-10-24T16:28:07.284444Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2174], cookie=16200530890531445505, name="Sem1") 2025-10-24T16:28:07.284463Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:152:2174], cookie=16200530890531445505) 2025-10-24T16:28:07.284503Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:155:2177], cookie=2489549248376999158, name="Sem2") 2025-10-24T16:28:07.284507Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:155:2177], cookie=2489549248376999158) 2025-10-24T16:28:07.286932Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.286961Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema: ... TxSemaphoreCreate::Complete (sender=[4:250:2271], cookie=14462022933082518939) 2025-10-24T16:28:07.979392Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=111, session=1, semaphore="Sem1" count=1) 2025-10-24T16:28:07.979432Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-10-24T16:28:07.990422Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=111) 2025-10-24T16:28:07.990621Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-10-24T16:28:08.001956Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=222) 2025-10-24T16:28:08.002131Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:138:2162], cookie=333, name="Sem1") 2025-10-24T16:28:08.002173Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-10-24T16:28:08.023286Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:138:2162], cookie=333) 2025-10-24T16:28:08.023457Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=444, session=2, semaphore="Sem1" count=1) 2025-10-24T16:28:08.034326Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=444) 2025-10-24T16:28:08.034444Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:138:2162], cookie=555, name="Sem1") 2025-10-24T16:28:08.034470Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-10-24T16:28:08.034480Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-10-24T16:28:08.045356Z node 4 :KESUS_TABLET 
DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:138:2162], cookie=555) 2025-10-24T16:28:08.131003Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:08.131050Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:08.135728Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:08.135972Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:08.167763Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:08.167949Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=12054781672571875720, session=0, seqNo=0) 2025-10-24T16:28:08.167987Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:08.178985Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=12054781672571875720, session=1) 2025-10-24T16:28:08.179114Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=112, name="Sem1", limit=5) 2025-10-24T16:28:08.179166Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:08.190072Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=112) 2025-10-24T16:28:08.190184Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=113, name="Sem1") 2025-10-24T16:28:08.201109Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=113) 2025-10-24T16:28:08.201233Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=114, name="Sem1", force=0) 2025-10-24T16:28:08.201265Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-10-24T16:28:08.212164Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=114) 2025-10-24T16:28:08.212275Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:137:2162], cookie=2012017946770391304 2025-10-24T16:28:08.212332Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=115, name="Sem1", limit=5) 2025-10-24T16:28:08.223266Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=115) 2025-10-24T16:28:08.223376Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=116, name="Sem1") 2025-10-24T16:28:08.234344Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=116) 2025-10-24T16:28:08.234450Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute 
(sender=[5:137:2162], cookie=117, name="Sem1", force=0) 2025-10-24T16:28:08.245422Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=117) 2025-10-24T16:28:08.245521Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=118, session=1, semaphore="Sem1" count=1) 2025-10-24T16:28:08.256449Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=118) 2025-10-24T16:28:08.256543Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=119, name="Sem1") 2025-10-24T16:28:08.267463Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2162], cookie=119) 2025-10-24T16:28:08.267554Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2162], cookie=120, name="Sem1") 2025-10-24T16:28:08.267585Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2162], cookie=120) 2025-10-24T16:28:08.267614Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:137:2162], cookie=9813752332851641957, session=1) 2025-10-24T16:28:08.267636Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-10-24T16:28:08.280280Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:137:2162], cookie=9813752332851641957) 2025-10-24T16:28:08.280405Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=121, name="Sem1", limit=5) 2025-10-24T16:28:08.291356Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=121) 2025-10-24T16:28:08.291473Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=122, name="Sem1") 2025-10-24T16:28:08.302433Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=122) 2025-10-24T16:28:08.302560Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=123, name="Sem1", force=0) 2025-10-24T16:28:08.313591Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=123) 2025-10-24T16:28:08.313710Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=124, session=1, semaphore="Sem1" count=1) 2025-10-24T16:28:08.324656Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=124) 2025-10-24T16:28:08.324764Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=125, name="Sem1") 2025-10-24T16:28:08.349569Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete 
(sender=[5:137:2162], cookie=125) 2025-10-24T16:28:08.349688Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2162], cookie=126, name="Sem1") 2025-10-24T16:28:08.349713Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2162], cookie=126) 2025-10-24T16:28:08.349839Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=127, name="Sem1", limit=5) 2025-10-24T16:28:08.349849Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=127) 2025-10-24T16:28:08.349879Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=128, name="Sem1") 2025-10-24T16:28:08.349888Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=128) 2025-10-24T16:28:08.349912Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=129, name="Sem1", force=0) 2025-10-24T16:28:08.349920Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=129) 2025-10-24T16:28:08.349951Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=130, session=1, semaphore="Sem1" count=1) 2025-10-24T16:28:08.349959Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=130) 2025-10-24T16:28:08.349983Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=131, name="Sem1") 2025-10-24T16:28:08.349990Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2162], cookie=131) 2025-10-24T16:28:08.350010Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2162], cookie=132, name="Sem1") 2025-10-24T16:28:08.350016Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2162], cookie=132) >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::ReadsetCountLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 32360, MsgBus: 20493 2025-10-24T16:27:54.740869Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825861282708633:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.740887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020af/r3tmp/tmpa0SSWL/pdisk_1.dat 2025-10-24T16:27:54.789387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.789526Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.789672Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825861282708605:2081] 1761323274740599 != 1761323274740602 TServer::EnableGrpc on GrpcPort 32360, node 1 2025-10-24T16:27:54.796474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.796489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.796490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.796532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20493 TClient is connected to server localhost:20493 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:54.835451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.838915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:54.848714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.848747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.849860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.898944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.916300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.928594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.033726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:55.063451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825865577677561:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.063476Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.063545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825865577677571:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.063554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.107497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.115413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.124757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.131788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.139135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.153405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.167516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.181889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:55.199274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825865577678434:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.199303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.199343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825865577678439:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.199357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825865577678440:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.199370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:55.200048Z node 1 :FLAT_TX_SCHEMESHARD ... elf is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.060238Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.075470Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.093376Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:59.106218Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.120631Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.400351Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825884230516481:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.400381Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.400568Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825884230516491:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.400576Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.410100Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.420755Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.432137Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.449483Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.459224Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.473148Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.487836Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.504521Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.528608Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825884230517351:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.528650Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.528655Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825884230517356:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.528683Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825884230517358:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.528690Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.529675Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:59.536234Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825884230517360:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:27:59.610050Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825884230517412:3550] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:59.892183Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.930807Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:03.173753Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2025-10-24T16:28:03.438800Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2025-10-24T16:28:03.928505Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7564825879935547569:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:03.928545Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:28:03.978483Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-10-24T16:28:03.978852Z --------------- Start update --------------- 2025-10-24T16:28:03.989489Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |61.3%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore >> KqpJoinOrder::SortingsByPKWithLookupJoin+RemoveLimitOperator >> OlapEstimationRowsCorrectness::TPCH9 >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> KqpJoinOrder::TPCH9_100 >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 >> KqpJoinOrder::CanonizedJoinOrderTPCH8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-10-24T16:28:06.758385Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.758425Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.762768Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.762816Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.784460Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.784597Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:138:2162], cookie=10545984430930317001, path="/foo/bar/baz") 2025-10-24T16:28:06.795670Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:138:2162], cookie=10545984430930317001, status=SUCCESS) 2025-10-24T16:28:06.795840Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:147:2169], cookie=1378664795185167315) 2025-10-24T16:28:06.806647Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:147:2169], cookie=1378664795185167315) 2025-10-24T16:28:06.806793Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:152:2174], cookie=582342447858173109, path="/foo/bar/baz") 2025-10-24T16:28:06.817593Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:152:2174], cookie=582342447858173109, status=SUCCESS) 2025-10-24T16:28:06.817756Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:157:2179], cookie=17616565323259969279) 2025-10-24T16:28:06.828688Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:157:2179], cookie=17616565323259969279) 2025-10-24T16:28:06.831834Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.831887Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.831949Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.832103Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:06.874638Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:06.874776Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:198:2211], cookie=3542619340719808830) 2025-10-24T16:28:06.885806Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: 
[72057594037927937] TTxConfigGet::Complete (sender=[1:198:2211], cookie=3542619340719808830) 2025-10-24T16:28:06.885988Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:206:2218], cookie=17438830260751307289, path="/foo/bar/baz") 2025-10-24T16:28:06.896871Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:206:2218], cookie=17438830260751307289, status=SUCCESS) 2025-10-24T16:28:06.897015Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:211:2223], cookie=9237182709560655148, path="/foo/bar/baz") 2025-10-24T16:28:06.897030Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:211:2223], cookie=9237182709560655148, status=PRECONDITION_FAILED) 2025-10-24T16:28:06.995838Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:06.995894Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:06.998974Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:06.999031Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.030812Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.030946Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:137:2162], cookie=3874394760543707453, name="Lock1") 2025-10-24T16:28:07.030966Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:137:2162], cookie=3874394760543707453) 2025-10-24T16:28:07.241966Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.242010Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.247152Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.247202Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.269051Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.269250Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=5649532846802871407, session=0, seqNo=0) 2025-10-24T16:28:07.269299Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.281438Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=5649532846802871407, session=1) 2025-10-24T16:28:07.281575Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:07.281640Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:07.281660Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:07.292645Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=111) 2025-10-24T16:28:07.292863Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:149:2171], cookie=2487792477677089516, name="Lock1", force=0) 2025-10-24T16:28:07.303878Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:149:2171], cookie=2487792477677089516) 2025-10-24T16:28:07.304075Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:154:2176], cookie=6561064872276504863, name="Sem1", force=0) 2025-10-24T16:28:07.315022Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:154:2176], cookie=6561064872276504863) 2025-10-24T16:28:07.315169Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:159:2181], cookie=4442031097327740325, name="Sem1", limit=42) 2025-10-24T16:28:07.315205Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-10-24T16:28:07.328237Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:159:2181], cookie=4442031097327740325) 2025-10-24T16:28:07.328455Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:164:2186], cookie=6150448824694095120, name="Sem1", force=0) 2025-10-24T16:28:07.328490Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-10-24T16:28:07.339501Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:164:2186], cookie=6150448824694095120) 2025-10-24T16:28:07.339696Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:169:2191], cookie=17226364645269949691, name="Sem1", force=0) 2025-10-24T16:28:07.352078Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:169:2191], cookie=17226364645269949691) 2025-10-24T16:28:07.491779Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.491818Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.495590Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.495768Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.517333Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.517502Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=10635364261678946457, session=0, seqNo=0) 2025-10-24T16:28:07.517544Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:07.528353Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=10635364261678946457, session=1) 2025-10-24T16:28:07.528461Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=14483708853271242102, 
session=0, seqNo=0) 2025-10-24T16:28:07.528499Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:07.539381Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=14483708853271242102, session=2) 2025-10-24T16:28:07.539505Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:138:2162], cookie=8283419038860843541 2025-10-24T16:28:07.539632Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:150:2172], cookie=7343563993725687140, name="Sem1", limit=3) 2025-10-24T16:28:07.539675Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:07.552242Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:150:2172], cookie=7343563993725687140) 2025-10-24T16:28:07.552369Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:138:2162], cookie=112, name="Sem1") 2025-10-24T16:28:07.552395Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:138:2162], cookie=112) 2025-10-24T16:28:07.552426Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:138:2162], cookie=113, name="Sem1") 2025-10-24T16:28:07.552433Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:138:2162], cookie=113) 2025-10-24T16:28:07.552462Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=2728843514202799057, session=2, seqNo=0) 2025-10-24T16:28:07.563478Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessio ... 
de 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.936290Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.946622Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=129, session=1, semaphore="Sem2" count=2) 2025-10-24T16:28:08.957801Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=129) 2025-10-24T16:28:08.957940Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:138:2162], cookie=130, name="Sem2") 2025-10-24T16:28:08.957968Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:138:2162], cookie=130) 2025-10-24T16:28:08.958017Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:138:2162], cookie=131, session=1, semaphore="Sem2" count=1) 2025-10-24T16:28:08.968993Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:138:2162], cookie=131) 2025-10-24T16:28:08.969128Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:138:2162], cookie=132, name="Sem2") 2025-10-24T16:28:08.969157Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:138:2162], cookie=132) 2025-10-24T16:28:08.969198Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:138:2162], cookie=133, name="Sem2") 2025-10-24T16:28:08.969207Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:138:2162], cookie=133) 2025-10-24T16:28:09.157951Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:09.157987Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:09.162393Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:09.162561Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:09.196268Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:09.197687Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=6503706409180490914, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-10-24T16:28:09.197763Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2025-10-24T16:28:09.208902Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=6503706409180490914) 2025-10-24T16:28:09.209089Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=7135220771686813580, path="/Root1/Res", config={ }) 2025-10-24T16:28:09.209181Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 
2025-10-24T16:28:09.220188Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=7135220771686813580) 2025-10-24T16:28:09.220384Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:152:2174], cookie=2825060529522167607, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-10-24T16:28:09.220437Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2025-10-24T16:28:09.231332Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:152:2174], cookie=2825060529522167607) 2025-10-24T16:28:09.231494Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:157:2179], cookie=18252331509605732559, path="/Root2/Res", config={ }) 2025-10-24T16:28:09.231574Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-10-24T16:28:09.242409Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:157:2179], cookie=18252331509605732559) 2025-10-24T16:28:09.242569Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:162:2184], cookie=15742556146140225076, path="/Root2/Res/Subres", config={ }) 2025-10-24T16:28:09.242618Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-10-24T16:28:09.253443Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:162:2184], cookie=15742556146140225076) 2025-10-24T16:28:09.253849Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:167:2189]. Cookie: 5457093408962702118. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:09.253863Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:167:2189], cookie=5457093408962702118) 2025-10-24T16:28:09.294791Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.345972Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.377033Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.377269Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:175:2193]. Cookie: 8613507329963726339. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-10-24T16:28:09.377446Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:178:2196]. Cookie: 4779581348455740451. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:09.377458Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:178:2196], cookie=4779581348455740451) 2025-10-24T16:28:09.418455Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.459444Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.459689Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:184:2200]. Cookie: 13345437824603618275. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-10-24T16:28:09.459823Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:167:2189]. Cookie: 15196235319837964031. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:09.459831Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:167:2189], cookie=15196235319837964031) 2025-10-24T16:28:09.459926Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:178:2196]. Cookie: 3441322829731801189. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:09.459932Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:178:2196], cookie=3441322829731801189) 2025-10-24T16:28:09.490501Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.490540Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:09.490717Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:191:2207]. Cookie: 12608262707311609134. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> KqpJoinOrder::Chain65Nodes >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> KqpJoinOrder::SortingsWithLookupJoin1+RemoveLimitOperator >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup |61.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |61.4%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2025-10-24T16:27:43.074640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:43.099074Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:43.099134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:43.099141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002986/r3tmp/tmpy8nRIL/pdisk_1.dat 2025-10-24T16:27:43.162127Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:43.162855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:43.162878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:43.162967Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323262798242 != 1761323262798245 2025-10-24T16:27:43.193939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:43.241420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:43.284491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:43.357146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:43.370758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:43.371015Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:43.371085Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:27:43.371134Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:43.379715Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:43.379889Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:43.379911Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:27:43.380037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:27:43.380044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:27:43.380048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:27:43.380097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:43.380114Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:43.380123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:27:43.390407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.393567Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:27:43.393628Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.393646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:27:43.393651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:27:43.393655Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:27:43.393660Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:27:43.393714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.393720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.393809Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:27:43.393830Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:27:43.393844Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:27:43.393853Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:43.393858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:27:43.393863Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:27:43.393866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:27:43.393869Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:27:43.393873Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:27:43.393880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.393884Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.393889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:27:43.393966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:27:43.393969Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.393985Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:27:43.394027Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:27:43.394035Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:27:43.394047Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:27:43.394053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.394057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:27:43.394060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:27:43.394063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.394100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.394103Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:27:43.394106Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:27:43.394108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:27:43.394116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:27:43.394118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:27:43.394122Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:27:43.394125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:27:43.394129Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.394340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:27:43.394346Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:27:43.404585Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:27:43.404605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:27:43.404611Z n ... hard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-10-24T16:28:09.893335Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-10-24T16:28:09.893340Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2025-10-24T16:28:09.893344Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:09.893349Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-10-24T16:28:09.893355Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-10-24T16:28:09.893360Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-10-24T16:28:09.893364Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [3500:281474976715666] at 72075186224037889 2025-10-24T16:28:09.893370Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-10-24T16:28:09.893374Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:09.893379Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-10-24T16:28:09.893384Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-10-24T16:28:09.893403Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-10-24T16:28:09.893408Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-10-24T16:28:09.893412Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715666] at 72075186224037889 to execution unit 
DropVolatileSnapshot 2025-10-24T16:28:09.893417Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-10-24T16:28:09.893421Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-10-24T16:28:09.893425Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-10-24T16:28:09.893430Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-10-24T16:28:09.893435Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-10-24T16:28:09.893462Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-10-24T16:28:09.893468Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-10-24T16:28:09.893472Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-10-24T16:28:09.893477Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-10-24T16:28:09.893484Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-10-24T16:28:09.893488Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-10-24T16:28:09.893493Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2025-10-24T16:28:09.893497Z node 15 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:09.893501Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2025-10-24T16:28:09.893505Z node 15 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-10-24T16:28:09.893509Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2025-10-24T16:28:09.904079Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-10-24T16:28:09.904130Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:28:09.904143Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-10-24T16:28:09.904171Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1078:2845], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:28:09.904182Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:09.904281Z node 15 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-10-24T16:28:09.904292Z node 15 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-10-24T16:28:09.904297Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-10-24T16:28:09.904307Z node 15 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1078:2845], exec latency: 0 ms, propose latency: 0 ms 2025-10-24T16:28:09.904313Z node 15 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:28:09.904759Z node 15 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [15:590:2518], Recipient [15:675:2566]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-10-24T16:28:09.904801Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:28:09.904817Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-10-24T16:28:09.904842Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:28:09.904849Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:28:09.904856Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:09.904865Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:28:09.904876Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-10-24T16:28:09.904882Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:28:09.904886Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:09.904891Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:28:09.904895Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:28:09.904915Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-10-24T16:28:09.905005Z node 15 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2427: 72075186224037888 Acquired lock# 
1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-10-24T16:28:09.905014Z node 15 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-10-24T16:28:09.905022Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[15:590:2518], 3} after executionsCount# 1 2025-10-24T16:28:09.905032Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[15:590:2518], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:09.905068Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[15:590:2518], 3} finished in read 2025-10-24T16:28:09.905081Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:28:09.905085Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:28:09.905090Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:28:09.905095Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:28:09.905110Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-10-24T16:28:09.905114Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:28:09.905119Z node 15 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:7] at 72075186224037888 has finished 2025-10-24T16:28:09.905125Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:28:09.905143Z node 15 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> KqpQuery::QueryCancelWriteImmediate [GOOD] |61.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |61.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> KqpJoinOrder::Sortings4Year+RemoveLimitOperator >> KqpFlipJoin::LeftSemi_3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 6282, MsgBus: 10570 2025-10-24T16:27:59.209739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825882194647148:2272];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:59.209792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/00207a/r3tmp/tmpyD4avF/pdisk_1.dat 2025-10-24T16:27:59.274383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:59.275320Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825882194646873:2081] 1761323279203196 != 1761323279203199 2025-10-24T16:27:59.278806Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6282, node 1 2025-10-24T16:27:59.284393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:59.284405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.284407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.284445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10570 2025-10-24T16:27:59.315238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.315277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.316239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.337961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:27:59.341385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:27:59.438429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.463406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.501005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.521072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.548508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:59.624292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882194648528:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.624321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.624439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882194648538:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.624455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.678320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.697350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.712101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.724764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.742124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.753438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.771488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.781525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.799046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825882194649405:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.799076Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.799217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882194649410:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.799225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882194649411:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.799230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Da ... 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:09.804370Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:09.804372Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:09.804435Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4522 TClient is connected to server localhost:4522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:09.893191Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:09.896618Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:09.899909Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:09.917474Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 
2025-10-24T16:28:09.948818Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.964642Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:10.076530Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:10.288767Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825931647477960:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.288794Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.288902Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825931647477969:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.288908Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.301211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.314839Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.325940Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.337663Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.353374Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.377211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.393083Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.418610Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.462148Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564825931647478826:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.462230Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.462397Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825931647478837:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.462403Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564825931647478836:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.462408Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.463466Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.468868Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:28:10.469183Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564825931647478840:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:10.562921Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564825931647478892:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.764948Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession |61.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |61.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 24575, MsgBus: 24154 2025-10-24T16:28:08.198864Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825920301027495:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.198942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:08.201324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f1a/r3tmp/tmpt9VoxU/pdisk_1.dat 2025-10-24T16:28:08.219779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.219805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.220763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:08.225524Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825920301027391:2081] 1761323288198111 != 1761323288198114 2025-10-24T16:28:08.230731Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24575, node 1 2025-10-24T16:28:08.241900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.241917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.241919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.241971Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration TClient is connected to server localhost:24154 TClient is connected to server localhost:24154 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:28:08.302984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.313680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.326113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.345666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:28:08.368268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:08.388274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.634102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920301029036:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.634138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.634192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920301029046:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.634199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.697700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.732590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.756568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.767104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.776385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.790198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.804655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.818097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.836898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825920301029909:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.836922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.836957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920301029914:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.836967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920301029915:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.836974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.837894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__o ... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:09.965962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:09.968123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:09.975516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:09.995051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:10.030870Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:10.034149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:10.057177Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:10.339902Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825931646873618:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.339931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.340060Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825931646873628:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.340072Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.353475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.373032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.387271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.403430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.425993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.440182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.456970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.489398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.516721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7564825931646874489:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.516771Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.516933Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825931646874494:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.516943Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825931646874495:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.516965Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.518128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.523095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:28:10.523176Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564825931646874498:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:10.608525Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564825931646874550:3550] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.812021Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:10.957593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.979617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.001238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.020989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-10-24T16:28:03.646269Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:03.646304Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:03.650381Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:03.650430Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:03.672250Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:03.673341Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:138:2162], cookie=18054433428550671623, path="/Root", config={ 
MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-10-24T16:28:03.673415Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:03.690657Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:138:2162], cookie=18054433428550671623) 2025-10-24T16:28:03.690862Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:148:2170], cookie=15055016909924888810, path="/Root/Res", config={ }) 2025-10-24T16:28:03.690915Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-10-24T16:28:03.701727Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:148:2170], cookie=15055016909924888810) 2025-10-24T16:28:03.702158Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:153:2175]. Cookie: 12063175239438961613. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:03.702172Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:153:2175], cookie=12063175239438961613) 2025-10-24T16:28:03.702256Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:153:2175]. Cookie: 177049982389123225. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-10-24T16:28:03.702261Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:153:2175], cookie=177049982389123225) 2025-10-24T16:28:05.733648Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:05.733700Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:05.745314Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:05.745463Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:05.771358Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:05.771588Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:138:2162], cookie=6387899134588966266, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-10-24T16:28:05.771707Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:05.784561Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:138:2162], cookie=6387899134588966266) 2025-10-24T16:28:05.784816Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:148:2170]. Cookie: 15519317394642091188. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:05.784828Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:148:2170], cookie=15519317394642091188) 2025-10-24T16:28:05.784920Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:148:2170]. Cookie: 16663077273057089521. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:05.784927Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:148:2170], cookie=16663077273057089521) 2025-10-24T16:28:05.784990Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:148:2170]. Cookie: 11789791823931560239. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-10-24T16:28:05.784997Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:148:2170], cookie=11789791823931560239) 2025-10-24T16:28:05.785041Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:148:2170]. Cookie: 10274598543149718421. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-10-24T16:28:05.785045Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:148:2170], cookie=10274598543149718421) 2025-10-24T16:28:07.768392Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:07.768433Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:07.772708Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:07.772744Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:07.794370Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:07.794570Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:138:2162], cookie=846987618641160397, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-10-24T16:28:07.794676Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:07.805708Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:138:2162], cookie=846987618641160397) 2025-10-24T16:28:07.805905Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:148:2170], cookie=7075347118472397016, path="/Root/Res1", config={ }) 2025-10-24T16:28:07.805958Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-10-24T16:28:07.816838Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:148:2170], cookie=7075347118472397016) 2025-10-24T16:28:07.816999Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:153:2175], cookie=8035241021162435214, path="/Root/Res2", config={ }) 2025-10-24T16:28:07.817072Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-10-24T16:28:07.827944Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:153:2175], cookie=8035241021162435214) 2025-10-24T16:28:07.828185Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:158:2180]. Cookie: 2150105765730517497. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:07.828196Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:158:2180], cookie=2150105765730517497) 2025-10-24T16:28:07.828274Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:158:2180]. Cookie: 17205125265002764586. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:07.828279Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:158:2180], cookie=17205125265002764586) 2025-10-24T16:28:07.828332Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:158:2180]. Cookie: 10316545930385572832. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-10-24T16:28:07.828342Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:158:2180], cookie=10316545930385572832) 2025-10-24T16:28:09.857641Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:09.857676Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:09.863251Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:09.863550Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:09.886054Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:09.886216Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:138:2162], cookie=5956060400832132138, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-10-24T16:28:09.886300Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:09.899980Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:138:2162], cookie=5956060400832132138) 2025-10-24T16:28:09.900306Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:148:2170]. Cookie: 18339925996889112024. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:09.900319Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:148:2170], cookie=18339925996889112024) 2025-10-24T16:28:09.900407Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:148:2170]. Cookie: 15918518242655899765. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2025-10-24T16:28:09.900413Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:148:2170], cookie=15918518242655899765) 2025-10-24T16:28:12.039052Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:12.039099Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:12.043919Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:12.043971Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:12.072514Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:12.072716Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2160], cookie=17695139280070537562, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-10-24T16:28:12.072784Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-10-24T16:28:12.085118Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2160], cookie=17695139280070537562) 2025-10-24T16:28:12.085333Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:145:2167], cookie=10714136573355996984, path="/Root/Res", config={ }) 2025-10-24T16:28:12.085407Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-10-24T16:28:12.096736Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:145:2167], cookie=10714136573355996984) 2025-10-24T16:28:12.097018Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:150:2172]. Cookie: 1248803520247570011. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-10-24T16:28:12.097029Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:150:2172], cookie=1248803520247570011) 2025-10-24T16:28:12.097133Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:154:2176], cookie=4294263229150666498, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-10-24T16:28:12.097187Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2025-10-24T16:28:12.097217Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:150:2172]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-10-24T16:28:12.108450Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:154:2176], cookie=4294263229150666498) 2025-10-24T16:28:12.108674Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:150:2172]. Cookie: 426297075144541381. Data: { } 2025-10-24T16:28:12.108689Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:150:2172], cookie=426297075144541381) >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 28971, MsgBus: 5293 2025-10-24T16:28:08.459723Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825923857319167:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.459775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f05/r3tmp/tmpzzwOoa/pdisk_1.dat 2025-10-24T16:28:08.528153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.530070Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.530278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825923857318915:2081] 1761323288455765 != 1761323288455768 TServer::EnableGrpc on GrpcPort 28971, node 1 2025-10-24T16:28:08.539667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.539684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.539686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.539732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5293 2025-10-24T16:28:08.562399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.562440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.563451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5293 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.588565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.598448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.619653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.655743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.675262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:08.708494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.845139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923857320557:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.845163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.845308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923857320567:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.845317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.901854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.910342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.923155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.936871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.951237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.960377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.972335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.986930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.012598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825928152288725:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.012628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.012646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928152288730:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.012658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928152288732:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.012664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.013442Z node 1 :FLAT_TX_SCHEMESHARD WA ... k executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:11.576283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.589636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.603751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.619394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.633934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.646918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.659653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.679792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.689447Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.719159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.730053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (Bool 'false)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($31) (FromFlow (Filter (ToFlow $31) (lambda '($32) (And (Exists (Member $32 '"Fk")) (Coalesce (== (Member $32 '"Value") (String '"Value1")) $4)))))) '('('"_logical_id" '1286) '('"_id" '"1bb54c0d-55f1f9f3-e0d15901-10b8a787")))) (let $6 (DqCnUnionAll (TDqOutput $5 '0))) (let $7 (DqPhyStage '($6) (lambda '($33) $33) '('('"_logical_id" '1670) '('"_id" '"2d1bf14f-fb6c565-d60d93e-e9656ea2")))) (let $8 (DqCnResult (TDqOutput $7 '0) '())) (let $9 '('('"type" '"generic"))) (let $10 (KqpPhysicalTx '($5 $7) '($8) '() $9)) (let $11 '"%kqp%tx_result_binding_0_0") (let $12 (DataType 'Int32)) (let $13 (OptionalType $12)) (let $14 '('"Key" $13)) (let $15 (OptionalType (DataType 'String))) (let $16 (StructType '('"Fk" $13) $14 '('"Value" $15))) (let $17 (ListType $16)) (let $18 %kqp%tx_result_binding_0_0) (let $19 '('('"_logical_id" '1329) '('"_id" '"f0158486-3a05ac59-23224b04-f87738b9") '('"_partition_mode" '"single"))) (let $20 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $18 (lambda '($34) (Member $34 '"Fk")) (Void) (Void) (lambda '($35) (Map (Filter (FlatMap $35 (lambda '($36) (Map (Take (Nth $36 '1) (Uint64 '1)) (lambda '($37) (AsStruct '('"Fk" (Member $37 '"Fk"))))))) (lambda '($38) (Exists (Member $38 '"Fk")))) (lambda '($39) (AsStruct '('"Key" (Member $39 '"Fk"))))))))) $19)) (let $21 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $22 (KqpCnStreamLookup (TDqOutput $20 '0) $21 '('"Key" '"Value") (ListType (StructType $14)) '('('"Strategy" '"LookupRows")))) (let $23 (DqPhyStage '($22) (lambda '($40) (block '( (let $41 '('Many 'Hashed 'Compact)) (let $42 (SqueezeToDict (FlatMap (ToFlow $18) (lambda '($43) (block '( (let $44 (Member $43 '"Fk")) (let $45 (Nothing (OptionalType (TupleType $12 $16)))) (let $46 (IfPresent $44 (lambda '($47) (Just '($47 $43))) $45)) (return (If (Exists $44) $46 $45)) )))) (lambda '($48) (Nth $48 '0)) (lambda '($49) 
(Nth $49 '1)) $41)) (return (FromFlow (FlatMap $42 (lambda '($50) (block '( (let $51 '('"Key" '"r.Key" '"Value" '"r.Value")) (let $52 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (Filter (ToFlow $40) (lambda '($53) (And (Exists (Member $53 '"Key")) (Coalesce (!= (Member $53 '"Value") (String '"Value22")) $4)))) $50 '"Inner" '('"Key") '('"Fk") $51 $52 '('"r.Key") '('"l.Fk"))) )))))) ))) '('('"_logical_id" '1507) '('"_id" '"f482f5ea-642d0fce-e2b18415-65e3bc87")))) (let $24 (DqCnUnionAll (TDqOutput $23 '0))) (let $25 (DqPhyStage '($24) (lambda '($54) $54) '('('"_logical_id" '2122) '('"_id" '"c3446d1-1218e8d-aae640e0-719575bb")))) (let $26 '($20 $23 $25)) (let $27 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $28 (DqCnResult (TDqOutput $25 '0) $27)) (let $29 (KqpTxResultBinding $17 '0 '0)) (let $30 (KqpPhysicalTx $26 '($28) '('($11 $29)) $9)) (return (KqpPhysicalQuery '($10 $30) '((KqpTxResultBinding (ListType (StructType '('"l.Fk" $13) '('"l.Key" $13) '('"l.Value" $15) '('"r.Key" $13) '('"r.Value" $15))) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (Bool 'false)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($34) (FromFlow (Filter (ToFlow $34) (lambda '($35) (And (Exists (Member $35 '"Fk")) (Coalesce (== (Member $35 '"Value") (String '"Value1")) $4)))))) '('('"_logical_id" '1314) '('"_id" '"984091f1-17c00987-9afa950a-133ce8b4")))) (let $6 (DqCnUnionAll (TDqOutput $5 '0))) (let $7 (DqPhyStage '($6) (lambda '($36) $36) '('('"_logical_id" '1775) '('"_id" '"bca4cb0-fc011ece-1002456b-e6a1486e")))) (let $8 (DqCnResult (TDqOutput $7 '0) '())) (let $9 '('('"type" '"data"))) (let $10 (KqpPhysicalTx '($5 $7) '($8) '() $9)) (let $11 '"%kqp%tx_result_binding_0_0") (let $12 (DataType 'Int32)) (let $13 (OptionalType $12)) (let $14 '('"Key" $13)) (let $15 (OptionalType (DataType 'String))) (let $16 (StructType '('"Fk" $13) $14 '('"Value" $15))) (let $17 (ListType $16)) (let $18 %kqp%tx_result_binding_0_0) (let $19 '('('"_logical_id" '1359) '('"_id" '"c5fa9f1c-d32ddbc4-f260df45-1869a833") '('"_partition_mode" '"single"))) (let $20 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $18 (lambda '($37) (Member $37 '"Fk")) (Void) (Void) (lambda '($38) (Map (Filter (FlatMap $38 (lambda '($39) (Map (Take (Nth $39 '1) (Uint64 '1)) (lambda '($40) (AsStruct '('"Fk" (Member $40 '"Fk"))))))) (lambda '($41) (Exists (Member $41 '"Fk")))) (lambda '($42) (AsStruct '('"Key" (Member $42 '"Fk"))))))))) $19)) (let $21 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $22 (KqpCnStreamLookup (TDqOutput $20 '0) $21 '('"Key" '"Value") (ListType (StructType $14)) '('('"Strategy" '"LookupRows")))) (let $23 (Uint64 '"1001")) (let $24 (StructType '('"l.Fk" $13) '('"l.Key" $13) '('"l.Value" $15) '('"r.Key" $13) '('"r.Value" $15))) (let $25 '('('"_logical_id" '1659) '('"_id" '"5127e5dc-a5331e36-962e8ec4-fd414e32") '('"_wide_channels" $24))) (let $26 (DqPhyStage '($22) (lambda '($43) (block '( (let $44 '('Many 'Hashed 'Compact)) (let $45 (SqueezeToDict (FlatMap (ToFlow $18) (lambda '($47) (block '( (let $48 (Member $47 '"Fk")) (let $49 (Nothing (OptionalType (TupleType $12 $16)))) (let $50 (IfPresent $48 
(lambda '($51) (Just '($51 $47))) $49)) (return (If (Exists $48) $50 $49)) )))) (lambda '($52) (Nth $52 '0)) (lambda '($53) (Nth $53 '1)) $44)) (let $46 (lambda '($58) (Member $58 '"l.Fk") (Member $58 '"l.Key") (Member $58 '"l.Value") (Member $58 '"r.Key") (Member $58 '"r.Value"))) (return (FromFlow (ExpandMap (Take (FlatMap $45 (lambda '($54) (block '( (let $55 '('"Key" '"r.Key" '"Value" '"r.Value")) (let $56 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (Filter (ToFlow $43) (lambda '($57) (And (Exists (Member $57 '"Key")) (Coalesce (!= (Member $57 '"Value") (String '"Value22")) $4)))) $54 '"Inner" '('"Key") '('"Fk") $55 $56 '('"r.Key") '('"l.Fk"))) )))) $23) $46))) ))) $25)) (let $27 (DqCnUnionAll (TDqOutput $26 '0))) (let $28 (DqPhyStage '($27) (lambda '($59) (FromFlow (NarrowMap (Take (ToFlow $59) $23) (lambda '($60 $61 $62 $63 $64) (AsStruct '('"l.Fk" $60) '('"l.Key" $61) '('"l.Value" $62) '('"r.Key" $63) '('"r.Value" $64)))))) '('('"_logical_id" '1672) '('"_id" '"d1a7730-64ed3bd7-965335d9-b86cd0ef")))) (let $29 '($20 $26 $28)) (let $30 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $31 (DqCnResult (TDqOutput $28 '0) $30)) (let $32 (KqpTxResultBinding $17 '0 '0)) (let $33 (KqpPhysicalTx $29 '($31) '('($11 $32)) $9)) (return (KqpPhysicalQuery '($10 $33) '((KqpTxResultBinding (ListType $24) '1 '0)) '('('"type" '"data_query")))) ) |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 13777, MsgBus: 4085 2025-10-24T16:28:08.445228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825923138021559:2141];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.445270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:08.450278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001eed/r3tmp/tmpdRfODN/pdisk_1.dat 2025-10-24T16:28:08.472944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.472973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.473991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13777, node 1 2025-10-24T16:28:08.489076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.489086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.489088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.489133Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:08.491573Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.491890Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825923138021456:2081] 1761323288444489 != 1761323288444492 TClient is connected to server localhost:4085 TClient is connected to server localhost:4085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:28:08.551952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.558733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.564241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.588500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.614196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:08.629742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.860556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923138023108:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.860585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.860637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923138023118:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.860646Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.922578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.935379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.944003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.957761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.972072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.985784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.000064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.014172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.032398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825927432991277:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.032424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927432991282:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.032440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.032511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927432991285:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.032528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.033297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__oper ... eshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.590423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.605629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.618946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.634303Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.703715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.725384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.737800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.796187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.816125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1108) '('"_id" '"c94ae6d-af1689b2-ebcb11cf-214ebcd1") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($36) $36) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 '('"Value" $14)) (let $16 (ListType (StructType '('"Fk" $12) $13 $15))) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '852) '('"_id" '"f640e1df-5464f3ef-e1638283-e40b6eeb") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($37) (Member $37 '"Fk")) (Void) (Void) (lambda '($38) (Map (Filter (FlatMap $38 (lambda '($39) (Map (Take (Nth $39 '1) (Uint64 '1)) (lambda '($40) (AsStruct '('"Fk" (Member $40 '"Fk"))))))) (lambda '($41) (Exists (Member $41 '"Fk")))) (lambda '($42) (AsStruct '('"Key" (Member $42 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType $13 $15)) (let $23 '('('"_logical_id" '934) '('"_id" '"5e1e1473-28e6cac6-650ae40b-60e388df") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($43) (block '( (let $44 (lambda '($46) (block '( (let $47 (Member $46 '"Key")) (return $47 (Member $46 '"Value") (Exists $47)) )))) (let $45 (lambda '($51 $52 $53) $51 $52)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $43) $44) (lambda '($48 $49 $50) $50)) $45))) ))) $23)) (let $25 (DqCnBroadcast (TDqOutput $24 '0))) (let $26 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $27 '('('"_logical_id" '1063) '('"_id" '"8f64cfed-d1e6fbb6-2207b11d-205055d3") '('"_wide_channels" $26))) (let $28 (DqPhyStage '($25) (lambda '($54) (block '( (let $55 '('Many 'Hashed 'Compact)) (let $56 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $54) (lambda '($59 $60) (Exists $59))) (lambda '($61 $62) (IfPresent $61 (lambda '($63) (Just '($63 (AsStruct '('"Key" $61) '('"Value" $62))))) (Nothing (OptionalType (TupleType $11 $22)))))) (lambda '($64) (Nth $64 '0)) (lambda '($65) (Nth $65 '1)) $55)) (let $57 (Sort (FlatMap $56 (lambda '($66) (block '( (let $67 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (let $68 '('"Key" '"r.Key" '"Value" '"r.Value")) (return (MapJoinCore (ToFlow $17) $66 '"Left" '('"Fk") '('"Key") $67 $68 '('"l.Fk") 
'('"r.Key"))) )))) (Bool 'true) (lambda '($69) (Member $69 '"l.Key")))) (let $58 (lambda '($70) (Member $70 '"l.Fk") (Member $70 '"l.Key") (Member $70 '"l.Value") (Member $70 '"r.Key") (Member $70 '"r.Value"))) (return (FromFlow (ExpandMap $57 $58))) ))) $27)) (let $29 (DqCnMerge (TDqOutput $28 '0) '('('1 '"Asc")))) (let $30 (DqPhyStage '($29) (lambda '($71) (FromFlow (NarrowMap (ToFlow $71) (lambda '($72 $73 $74 $75 $76) (AsStruct '('"l.Fk" $72) '('"l.Key" $73) '('"l.Value" $74) '('"r.Key" $75) '('"r.Value" $76)))))) '('('"_logical_id" '1075) '('"_id" '"ea328be3-5734980-8c024564-8cb92ac0")))) (let $31 '($19 $24 $28 $30)) (let $32 '('"r.Value" '"l.Key" '"r.Key" '"l.Value" '"l.Fk")) (let $33 (DqCnResult (TDqOutput $30 '0) $32)) (let $34 (KqpTxResultBinding $16 '0 '0)) (let $35 (KqpPhysicalTx $31 '($33) '('($10 $34)) $8)) (return (KqpPhysicalQuery '($9 $35) '((KqpTxResultBinding (ListType $26) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1141) '('"_id" '"30cc4d7a-38068298-ca49418b-bd95fde2") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($37) $37) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 '('"Value" $14)) (let $16 (ListType (StructType '('"Fk" $12) $13 $15))) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '883) '('"_id" '"a5c2880f-ac7dc4e0-52916156-2c36be0d") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($38) (Member $38 '"Fk")) (Void) (Void) (lambda '($39) (Map (Filter (FlatMap $39 (lambda '($40) (Map (Take (Nth $40 '1) (Uint64 '1)) (lambda '($41) (AsStruct '('"Fk" (Member $41 '"Fk"))))))) (lambda '($42) (Exists (Member $42 '"Fk")))) (lambda '($43) (AsStruct '('"Key" (Member $43 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType $13 $15)) (let $23 '('('"_logical_id" '965) '('"_id" '"a2cafdae-bd224c6b-745423c5-c8f927a8") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($44) (block '( (let $45 (lambda '($47) (block '( (let $48 (Member $47 '"Key")) (return $48 (Member $47 '"Value") (Exists $48)) )))) (let $46 (lambda '($52 $53 $54) $52 $53)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $44) $45) (lambda '($49 $50 $51) $51)) $46))) ))) $23)) (let $25 (DqCnBroadcast (TDqOutput $24 '0))) (let $26 (Uint64 '"1001")) (let $27 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $28 '('('"_logical_id" '1094) '('"_id" '"df1d22c3-a5c5d0f7-8ad55234-74e723f3") '('"_wide_channels" $27))) (let $29 (DqPhyStage '($25) (lambda '($55) (block '( (let $56 '('Many 'Hashed 'Compact)) (let $57 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $55) (lambda '($60 $61) (Exists $60))) (lambda '($62 $63) (IfPresent 
$62 (lambda '($64) (Just '($64 (AsStruct '('"Key" $62) '('"Value" $63))))) (Nothing (OptionalType (TupleType $11 $22)))))) (lambda '($65) (Nth $65 '0)) (lambda '($66) (Nth $66 '1)) $56)) (let $58 (TopSort (FlatMap $57 (lambda '($67) (block '( (let $68 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (let $69 '('"Key" '"r.Key" '"Value" '"r.Value")) (return (MapJoinCore (ToFlow $17) $67 '"Left" '('"Fk") '('"Key") $68 $69 '('"l.Fk") '('"r.Key"))) )))) $26 (Bool 'true) (lambda '($70) (Member $70 '"l.Key")))) (let $59 (lambda '($71) (Member $71 '"l.Fk") (Member $71 '"l.Key") (Member $71 '"l.Value") (Member $71 '"r.Key") (Member $71 '"r.Value"))) (return (FromFlow (ExpandMap $58 $59))) ))) $28)) (let $30 (DqCnMerge (TDqOutput $29 '0) '('('1 '"Asc")))) (let $31 (DqPhyStage '($30) (lambda '($72) (FromFlow (NarrowMap (Take (ToFlow $72) $26) (lambda '($73 $74 $75 $76 $77) (AsStruct '('"l.Fk" $73) '('"l.Key" $74) '('"l.Value" $75) '('"r.Key" $76) '('"r.Value" $77)))))) '('('"_logical_id" '1107) '('"_id" '"c6d73534-d52ed665-f850f69d-559a3299")))) (let $32 '($19 $24 $29 $31)) (let $33 '('"r.Value" '"l.Key" '"r.Key" '"l.Value" '"l.Fk")) (let $34 (DqCnResult (TDqOutput $31 '0) $33)) (let $35 (KqpTxResultBinding $16 '0 '0)) (let $36 (KqpPhysicalTx $32 '($34) '('($10 $35)) $8)) (return (KqpPhysicalQuery '($9 $36) '((KqpTxResultBinding (ListType $27) '1 '0)) '('('"type" '"data_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19857, MsgBus: 20343 2025-10-24T16:28:08.202763Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825923530834648:2143];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.202931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f15/r3tmp/tmphcruoy/pdisk_1.dat 2025-10-24T16:28:08.245904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.256793Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825923530834539:2081] 1761323288202051 != 1761323288202054 2025-10-24T16:28:08.257051Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19857, node 1 2025-10-24T16:28:08.266012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.266025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.266026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.266072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20343 TClient is connected to server localhost:20343 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:28:08.312180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.312209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.313297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.316285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.327988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.346025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:28:08.379838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.398272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:08.472274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.642193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923530836190:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.642242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.642368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923530836200:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.642403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.702244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.714610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.729428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.753878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.762302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.776041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.789705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.804980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.827496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825923530837062:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.827545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.827762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923530837068:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.827781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825923530837067:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.827794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.828782Z node 1 :FLAT_TX_SCHEMESHARD ... _create_table.cpp:686) 2025-10-24T16:28:11.405690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.418370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.437487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.449147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.463419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.477843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.496237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.508113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.520931Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.536240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.600250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.682448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.700768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Fk" '"Value") '() (Void) '())) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1723) '('"_id" '"e931c4eb-df9f13e5-cac354ff-dfb4cf97") $3)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($29) $29) $4)) (let $6 (DqCnResult (TDqOutput $5 '0) '())) (let $7 '('('"type" '"generic"))) (let $8 (KqpPhysicalTx '($5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (DataType 'Int32)) (let $11 (OptionalType $10)) (let $12 (OptionalType (DataType 'String))) (let $13 (StructType '('"Fk" $11) '('"Value" $12))) (let $14 (ListType $13)) (let $15 %kqp%tx_result_binding_0_0) (let $16 '('('"_logical_id" '1310) '('"_id" '"5470518c-4e19aaaf-8c60a494-9d0ecef8") $3)) (let $17 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $15 (lambda '($30) (Member $30 '"Fk")) (Void) (Void) (lambda '($31) (Map (Filter (FlatMap $31 (lambda '($32) (Map (Take (Nth $32 '1) (Uint64 '1)) (lambda '($33) (AsStruct '('"Fk" (Member $33 '"Fk"))))))) (lambda '($34) (Exists (Member $34 '"Fk")))) (lambda '($35) (AsStruct '('"Key" (Member $35 '"Fk"))))))))) $16)) (let $18 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $19 (KqpCnStreamLookup (TDqOutput $17 '0) $18 '('"Key" '"Value") (ListType (StructType '('"Key" $11))) 
'('('"Strategy" '"LookupRows")))) (let $20 (StructType '('"l.Value" $12) '('"r.Value" $12))) (let $21 '('('"_logical_id" '1673) '('"_id" '"9d708ece-7437e8c6-d99d53bc-8f6f7ac1") '('"_wide_channels" $20))) (let $22 (DqPhyStage '($19) (lambda '($36) (block '( (let $37 '('Many 'Hashed 'Compact)) (let $38 (SqueezeToDict (FlatMap (ToFlow $15) (lambda '($42) (block '( (let $43 (Member $42 '"Fk")) (let $44 (Nothing (OptionalType (TupleType $10 $13)))) (let $45 (IfPresent $43 (lambda '($46) (Just '($46 $42))) $44)) (return (If (Exists $43) $45 $44)) )))) (lambda '($47) (Nth $47 '0)) (lambda '($48) (Nth $48 '1)) $37)) (let $39 (Bool 'true)) (let $40 (Sort (FlatMap $38 (lambda '($49) (MapJoinCore (Filter (ToFlow $36) (lambda '($50) (And (Exists (Member $50 '"Key")) (Exists (Member $50 '"Value"))))) $49 '"Inner" '('"Key") '('"Fk") '('"Value" '"r.Value") '('"Value" '"l.Value") '('"r.Key") '('"l.Fk")))) '($39 $39) (lambda '($51) '((Member $51 '"l.Value") (Member $51 '"r.Value"))))) (let $41 (lambda '($52) (Member $52 '"l.Value") (Member $52 '"r.Value"))) (return (FromFlow (ExpandMap $40 $41))) ))) $21)) (let $23 (DqCnMerge (TDqOutput $22 '0) '('('0 '"Asc") '('1 '"Asc")))) (let $24 (DqPhyStage '($23) (lambda '($53) (FromFlow (NarrowMap (ToFlow $53) (lambda '($54 $55) (AsStruct '('"l.Value" $54) '('"r.Value" $55)))))) '('('"_logical_id" '1685) '('"_id" '"f6d480c6-59078c77-fc6df1ea-92fb7d2e")))) (let $25 '($17 $22 $24)) (let $26 (DqCnResult (TDqOutput $24 '0) '('"l.Value" '"r.Value"))) (let $27 (KqpTxResultBinding $14 '0 '0)) (let $28 (KqpPhysicalTx $25 '($26) '('($9 $27)) $7)) (return (KqpPhysicalQuery '($8 $28) '((KqpTxResultBinding (ListType $20) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Fk" '"Value") '() (Void) '())) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1756) '('"_id" '"f1eecc-9da9ef59-8e1073b5-e4576001") $3)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($30) $30) $4)) (let $6 (DqCnResult (TDqOutput $5 '0) '())) (let $7 '('('"type" '"data"))) (let $8 (KqpPhysicalTx '($5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (DataType 'Int32)) (let $11 (OptionalType $10)) (let $12 (OptionalType (DataType 'String))) (let $13 (StructType '('"Fk" $11) '('"Value" $12))) (let $14 (ListType $13)) (let $15 %kqp%tx_result_binding_0_0) (let $16 '('('"_logical_id" '1341) '('"_id" '"6118eec-6fcfcc13-15000196-bed9dc21") $3)) (let $17 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $15 (lambda '($31) (Member $31 '"Fk")) (Void) (Void) (lambda '($32) (Map (Filter (FlatMap $32 (lambda '($33) (Map (Take (Nth $33 '1) (Uint64 '1)) (lambda '($34) (AsStruct '('"Fk" (Member $34 '"Fk"))))))) (lambda '($35) (Exists (Member $35 '"Fk")))) (lambda '($36) (AsStruct '('"Key" (Member $36 '"Fk"))))))))) $16)) (let $18 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $19 (KqpCnStreamLookup (TDqOutput $17 '0) $18 '('"Key" '"Value") (ListType (StructType '('"Key" $11))) '('('"Strategy" '"LookupRows")))) (let $20 (Uint64 '"1001")) (let $21 (StructType '('"l.Value" $12) '('"r.Value" $12))) (let $22 '('('"_logical_id" '1704) '('"_id" '"602379e5-f8945493-6421a97e-25bcdece") '('"_wide_channels" $21))) (let $23 (DqPhyStage '($19) (lambda '($37) (block '( (let $38 '('Many 'Hashed 'Compact)) (let $39 
(SqueezeToDict (FlatMap (ToFlow $15) (lambda '($43) (block '( (let $44 (Member $43 '"Fk")) (let $45 (Nothing (OptionalType (TupleType $10 $13)))) (let $46 (IfPresent $44 (lambda '($47) (Just '($47 $43))) $45)) (return (If (Exists $44) $46 $45)) )))) (lambda '($48) (Nth $48 '0)) (lambda '($49) (Nth $49 '1)) $38)) (let $40 (Bool 'true)) (let $41 (TopSort (FlatMap $39 (lambda '($50) (MapJoinCore (Filter (ToFlow $37) (lambda '($51) (And (Exists (Member $51 '"Key")) (Exists (Member $51 '"Value"))))) $50 '"Inner" '('"Key") '('"Fk") '('"Value" '"r.Value") '('"Value" '"l.Value") '('"r.Key") '('"l.Fk")))) $20 '($40 $40) (lambda '($52) '((Member $52 '"l.Value") (Member $52 '"r.Value"))))) (let $42 (lambda '($53) (Member $53 '"l.Value") (Member $53 '"r.Value"))) (return (FromFlow (ExpandMap $41 $42))) ))) $22)) (let $24 (DqCnMerge (TDqOutput $23 '0) '('('0 '"Asc") '('1 '"Asc")))) (let $25 (DqPhyStage '($24) (lambda '($54) (FromFlow (NarrowMap (Take (ToFlow $54) $20) (lambda '($55 $56) (AsStruct '('"l.Value" $55) '('"r.Value" $56)))))) '('('"_logical_id" '1717) '('"_id" '"1c847312-99f7e5b-7f69c386-350d90c")))) (let $26 '($17 $23 $25)) (let $27 (DqCnResult (TDqOutput $25 '0) '('"l.Value" '"r.Value"))) (let $28 (KqpTxResultBinding $14 '0 '0)) (let $29 (KqpPhysicalTx $26 '($27) '('($9 $28)) $7)) (return (KqpPhysicalQuery '($8 $29) '((KqpTxResultBinding (ListType $21) '1 '0)) '('('"type" '"data_query")))) ) >> KqpLimits::ReplySizeExceeded [GOOD] |61.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |61.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 62379, MsgBus: 12140 2025-10-24T16:27:59.704701Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825884300703428:2064];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:59.704723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002074/r3tmp/tmpBBBP5B/pdisk_1.dat 2025-10-24T16:27:59.747262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:59.758353Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:59.762870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825884300703404:2081] 1761323279704444 != 1761323279704447 TServer::EnableGrpc on GrpcPort 62379, node 1 2025-10-24T16:27:59.774432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2025-10-24T16:27:59.774439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.774441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.774468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12140 2025-10-24T16:27:59.808473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.808509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.809680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.833235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.846632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.936401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:00.706899Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:02.547951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825897185606855:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.547943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825897185606863:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.548004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.548074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825897185606870:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.548082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:02.549054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:02.551989Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825897185606869:2430], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-10-24T16:28:02.635485Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825897185606922:2957] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:02.704605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:04.721721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564825884300703428:2064];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:04.721777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19674, MsgBus: 10436 2025-10-24T16:28:05.172831Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564825910418836513:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:05.172849Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002074/r3tmp/tmpHQuPM2/pdisk_1.dat 2025-10-24T16:28:05.175938Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:05.187174Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:05.187477Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564825910418836484:2081] 1761323285172649 != 1761323285172652 2025-10-24T16:28:05.190444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:05.190471Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:05.191716Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19674, node 2 2025-10-24T16:28:05.197382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:05.197398Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:05.197400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:05.197441Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration TClient is connected to server localhost:10436 TClient is connected to server localhost:10436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:05.233687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called ... rom file: (empty maybe) 2025-10-24T16:28:11.067325Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27418 TClient is connected to server localhost:27418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:11.172340Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:11.181267Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:11.196786Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:28:11.209071Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:11.241135Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.264146Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:11.538726Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825934055330608:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.538756Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.538919Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825934055330617:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.538942Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.554233Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.564483Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.578548Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.593213Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.606303Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.635898Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.648649Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.677437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.722729Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825934055331481:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.722771Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.722866Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825934055331486:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.722877Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825934055331487:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.722901Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.723895Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:11.727817Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825934055331490:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:11.798940Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825934055331542:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:12.028378Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:12.129663Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:13.243688Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=NDVkMTA5ZS1jYjgzNjdhOC1iMWU0Yjc1Mi1mYTMwOWEy, ActorId: [5:7564825938350299143:2522], ActorState: ExecuteState, TraceId: 01k8bgs6e8aptyxmy8tvgnvdce, Create QueryResponse for error on request, msg: |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> KqpJoinOrder::SortingsWithLookupJoin2-RemoveLimitOperator [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> LocalPartition::WithoutPartitionWithSplit >> TxUsage::WriteToTopic_Demo_42_Table |61.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |61.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsWithLookupJoin2-RemoveLimitOperator [GOOD] Test command err: Trying to start YDB, gRPC: 22261, MsgBus: 21716 2025-10-24T16:28:08.203946Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825922971541803:2070];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.203965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f10/r3tmp/tmpyl2Kx5/pdisk_1.dat 2025-10-24T16:28:08.240276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.255372Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.255571Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825922971541770:2081] 1761323288203620 != 1761323288203623 TServer::EnableGrpc on GrpcPort 22261, node 1 2025-10-24T16:28:08.262520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.262536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.262537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.262573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21716 TClient is connected to server localhost:21716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:28:08.311927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.311971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.313147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.323090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.408961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.583218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922971542448:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.583238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922971542459:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.583244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.583367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922971542463:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.583377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.584146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.586352Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825922971542462:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:08.670812Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825922971542515:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.733578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.764116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.773569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.781809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.789774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.819067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.826961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.838759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.852553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.866226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.880613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.897588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.908583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
ult=not_found; 2025-10-24T16:28:14.336180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.336183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.336572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.336581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.336584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.337310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.337330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.337333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.337636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.337653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.337656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.338478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.338499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.338502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.338745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.338760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.338762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.339605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.339624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.339627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.339956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.339965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.339967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.341124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.341142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.341145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.341187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.341209Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.341218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.342365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.342384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.342387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.342468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.342484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.342491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.343564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.343581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.343583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.343754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.343772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.343774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.344484Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.344499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.344501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.349340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.349362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:14.349365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; |61.5%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 63605, MsgBus: 15071 2025-10-24T16:28:06.849750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825913837010500:2157];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:06.849799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f1d/r3tmp/tmp4TzUve/pdisk_1.dat 2025-10-24T16:28:06.893469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:06.905435Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:06.905707Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825913837010370:2081] 1761323286848663 != 1761323286848666 TServer::EnableGrpc on GrpcPort 63605, node 1 2025-10-24T16:28:06.911934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:06.911950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:06.911952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:06.911989Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15071 TClient is connected to server localhost:15071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:28:06.952924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:06.952958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:06.954102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:06.959922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:07.085312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:07.201802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825918131978353:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.201811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825918131978342:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.201833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.201931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825918131978357:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.201949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:07.202776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:07.204965Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825918131978356:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:07.292995Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825918131978409:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:07.346992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.365436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.373031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.382283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.396293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.429264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.437140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.444581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.459364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.473572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.487318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.501200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:07.515689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
ult=not_found; 2025-10-24T16:28:13.611677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.611679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.612540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.612560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.612562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.613674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.613683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.613686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.614816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.614824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.614826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.617007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.617026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.617028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.618057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.618065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.618067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.619103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.619112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.619114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.615845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.620160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.620163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.621227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.621236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.621238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.622431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.622452Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.622454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.623684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.623698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.623700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.624610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.624619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.624621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.625537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.625545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.625547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.626397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.626404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.626407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.627944Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.627962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.627965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.628886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.628895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:13.628898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; >> KqpJoinOrder::SortingsByPKWithLookupJoin+RemoveLimitOperator [GOOD] >> KqpJoinOrder::SortingsWithLookupJoin2+RemoveLimitOperator [GOOD] |61.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |61.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |61.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |61.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |61.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table >> DataShardTxOrder::DelayData [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table >> KqpJoinOrder::SortingsByPK+RemoveLimitOperator [GOOD] >> KqpSinkMvcc::SnapshotExpiration |61.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |61.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> KqpTx::RollbackManyTx ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsWithLookupJoin2+RemoveLimitOperator [GOOD] Test command err: Trying to start YDB, gRPC: 27336, MsgBus: 15631 2025-10-24T16:28:08.370495Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825922265087881:2146];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.370599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ef0/r3tmp/tmpsrZxWa/pdisk_1.dat 2025-10-24T16:28:08.433288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 27336, node 1 2025-10-24T16:28:08.445511Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.456039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.456051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.456052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.456087Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15631 2025-10-24T16:28:08.478045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.478074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.479301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.520700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:28:08.523757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.606248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.796736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922265088441:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.796761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.796783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922265088452:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.797406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922265088455:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.797422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.797803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.800056Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825922265088456:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:08.875164Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825922265088508:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.925976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.943924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.951783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.968278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.978472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.017366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.025356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.038575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.048176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.055291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.069778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.084099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.097463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo un ... 
74976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.225926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.226674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038428;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.226691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038428;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.226694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038428;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.227833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.227843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.227861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038470;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.228907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.228917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.228923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038464;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.231445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.231461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.231463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.232631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.232640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.232643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.233782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.233793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.233797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.235119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.235128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.235130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.236445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.236461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.236463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038488;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.237549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.237559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.237561Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038502;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.238694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.238702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.238704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.248061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.248082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.248085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.249240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.249250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.249253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.250388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.250398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.250400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.251485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.251494Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.251497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.258126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.258152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.258154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found;
:8:3: Warning: ORDER BY without LIMIT in subquery will be ignored, code: 4504
:8:3: Warning: ORDER BY without LIMIT in subquery will be ignored, code: 4504 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-10-24T16:27:42.971108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:42.982552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:42.982571Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.982940Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:42.982980Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:42.983019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:42.983787Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:42.991297Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:42.991337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:42.991440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:42.991446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:42.991452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:42.991508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:42.991516Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:42.991525Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:43.027599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.030498Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:43.030566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.030587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:43.030591Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:43.030596Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:43.030600Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:43.030647Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.030654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, 
processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.030727Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:43.030750Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:43.030755Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:43.030762Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:43.030769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:43.030774Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:43.030779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:43.030784Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:43.030788Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:43.030796Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.030800Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.030805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:43.031134Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:43.031140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.031149Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:43.031183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:43.031192Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:43.031201Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:43.031207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.031210Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:43.031214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:43.031217Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:43.031272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.031275Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:43.031278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:43.031280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:43.031289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:43.031291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:43.031294Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:43.031296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:43.031301Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.043421Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:43.043456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:43.043463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:43.043473Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:43.043486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:43.043577Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.043584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.043590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:43.043605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:43.043609Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:43.043639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:43.043645Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:43.043648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 
2025-10-24T16:27:43.043652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:43.048034Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:43.048075Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:43.048162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.048171Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.048185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:43.048194Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:43.048201Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:43.048210Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:43.048216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100 ... plan for [1000005:506] at 9437184 has finished 2025-10-24T16:28:18.380719Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:28:18.380725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:28:18.380735Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-10-24T16:28:18.380962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:240:2232], Recipient [1:240:2232]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:18.380972Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:18.380985Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:28:18.380992Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:28:18.380997Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:507] at 9437184 2025-10-24T16:28:18.381002Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-10-24T16:28:18.381011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-10-24T16:28:18.381021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-10-24T16:28:18.381026Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-10-24T16:28:18.381281Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-10-24T16:28:18.381288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-10-24T16:28:18.381297Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-10-24T16:28:18.381301Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-10-24T16:28:18.381308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-10-24T16:28:18.381316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:18.381321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-10-24T16:28:18.381336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:507] is the new logically complete end at 9437184 2025-10-24T16:28:18.381341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-10-24T16:28:18.381344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:507] at 9437184 2025-10-24T16:28:18.381354Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381358Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:18.381363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-10-24T16:28:18.381367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-10-24T16:28:18.381379Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381382Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-10-24T16:28:18.381386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-10-24T16:28:18.381390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-10-24T16:28:18.381395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2025-10-24T16:28:18.381402Z node 1 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-10-24T16:28:18.381406Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-10-24T16:28:18.381411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-10-24T16:28:18.381418Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-10-24T16:28:18.381422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-10-24T16:28:18.381426Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-10-24T16:28:18.381433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit BlockFailPoint 2025-10-24T16:28:18.381437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit BlockFailPoint 2025-10-24T16:28:18.381441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.381445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit BlockFailPoint 2025-10-24T16:28:18.381448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-10-24T16:28:18.381453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-10-24T16:28:18.381541Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:305: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-10-24T16:28:18.381550Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-10-24T16:28:18.480811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:28:18.480841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-10-24T16:28:18.480848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-10-24T16:28:18.480855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-10-24T16:28:18.480947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-10-24T16:28:18.480952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on 
unit CompleteOperation 2025-10-24T16:28:18.480957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-10-24T16:28:18.480961Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-10-24T16:28:18.480968Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000005:507] at 9437184 is Executed 2025-10-24T16:28:18.480972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-10-24T16:28:18.480977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000005:507] at 9437184 has finished 2025-10-24T16:28:18.480984Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:18.480989Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:28:18.480994Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:28:18.480998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:28:18.492572Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-10-24T16:28:18.492597Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-10-24T16:28:18.492615Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:28:18.492625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-10-24T16:28:18.492646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:28:18.492656Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:28:18.492707Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:28:18.492713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-10-24T16:28:18.492721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:105:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:28:18.492725Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsByPKWithLookupJoin+RemoveLimitOperator [GOOD] Test command err: Trying to start YDB, gRPC: 21666, MsgBus: 12831 2025-10-24T16:28:09.523914Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825925615845366:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:09.523935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee4/r3tmp/tmphqKg7j/pdisk_1.dat 2025-10-24T16:28:09.592697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:09.597133Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:09.597318Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825925615845340:2081] 1761323289523719 != 1761323289523722 TServer::EnableGrpc on GrpcPort 21666, node 1 2025-10-24T16:28:09.607320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:09.607332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:09.607334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:09.607372Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12831 2025-10-24T16:28:09.627842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:09.627880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:09.630165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:09.674483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:28:09.677406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:09.880563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:09.945260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825925615846016:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.945299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.945502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825925615846028:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.945517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825925615846029:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.945523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.946376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:09.949138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:28:09.949293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825925615846032:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:10.032891Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825929910813379:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.104260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.130353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.140345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.170731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.187182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.233677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.250571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.262901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.275826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.288295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.308576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.333282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... ult=not_found; 2025-10-24T16:28:16.614073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.614076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.616354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.616371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.616374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.617563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.617573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 
2025-10-24T16:28:16.617576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.618553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.618564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.618566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.619582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.619597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.619602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.620245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.620261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.620263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.621259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.621268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.621271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.622395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.622407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.622409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.623420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.623430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.623432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.624873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.624888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.624890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.625911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.625920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.625922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.627038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.627049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.627052Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.628571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.628590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.628593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.629715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.629723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.629725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.630735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.630744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.630747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.637267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.637295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:16.637298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; >> KqpJoinOrder::UdfConstantFold-ColumnStore [GOOD] >> KqpSnapshotRead::TestReadOnly+withSink |61.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |61.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsByPK+RemoveLimitOperator [GOOD] Test command err: Trying to start YDB, gRPC: 15533, MsgBus: 25038 2025-10-24T16:28:08.356121Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825921852279034:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.356152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001eff/r3tmp/tmpagQ4es/pdisk_1.dat 2025-10-24T16:28:08.411463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.411587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.415226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.420232Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.420736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15533, node 1 2025-10-24T16:28:08.444646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.444657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.444659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.444708Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25038 TClient is connected to server localhost:25038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:08.522725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.525708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.717327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.764373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921852279674:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.764400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.764559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921852279686:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.764571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.764581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921852279687:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.765231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.767161Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825921852279690:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:08.860820Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825921852279741:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.908801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.925232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.942577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.952056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.960562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.991995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:08.999732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.013151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.027625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.041290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.055083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.069246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.083091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo un ... 
ult=not_found; 2025-10-24T16:28:17.190185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.190188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.191292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.191301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.191304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.193065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.193079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.193082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.194209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.194217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.194220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.195283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.195292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.195295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.196813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.196827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.196831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.197813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.197822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.197824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.199323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.199339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.199341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.200351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.200359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.200361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.201290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.201299Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.201301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.202957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.202978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.202981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.204109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.204118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.204120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.205049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.205056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.205058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.205860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.205867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.205869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.206745Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.206753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.206755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.215238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.215758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.215769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; >> KqpJoinOrder::Sortings4Year+RemoveLimitOperator [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::UdfConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25904, MsgBus: 29242 2025-10-24T16:28:08.577473Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825922727681564:2150];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.577610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ef2/r3tmp/tmprF4jGP/pdisk_1.dat 2025-10-24T16:28:08.620675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.640197Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25904, node 1 2025-10-24T16:28:08.648431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.648446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.648448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.648496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29242 2025-10-24T16:28:08.678458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.678479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.679468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.714468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.720478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.853093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:09.038306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927022649420:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.038308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927022649409:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.038334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.038414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927022649424:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.038435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.039024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:09.041425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:28:09.041482Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825927022649423:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:09.106084Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825927022649476:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:09.164835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.184604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.209004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.217342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.230172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.263901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.276226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.286624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.300341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.314270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.328503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.343042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemes ... 
ult=not_found; 2025-10-24T16:28:17.976348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.976350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.977404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.977413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.977416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.978473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.978482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.978484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.979540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.979549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.979551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.980951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.980966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.980969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.982033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.982043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.982046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.983229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.983238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.983242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.984892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.984909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.984914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.986056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.986070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.986072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.987251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.987261Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.987263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.988633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.988650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.988653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.989997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.990012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.990016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.991133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.991143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.991145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.992248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.992257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.992260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.993365Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.993375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.993378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.994573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.994582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:17.994601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; >> KqpSinkLocks::OlapUncommittedRead >> KqpJoinOrder::SortingsWithLookupJoin1+RemoveLimitOperator [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> KqpTx::CommitRequired >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Sortings4Year+RemoveLimitOperator [GOOD] Test command err: Trying to start YDB, gRPC: 6639, MsgBus: 16170 2025-10-24T16:28:11.469997Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825936127373650:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:11.470019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ed6/r3tmp/tmpQePTBm/pdisk_1.dat 2025-10-24T16:28:11.511599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:11.525957Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825936127373613:2081] 1761323291469605 != 1761323291469608 2025-10-24T16:28:11.526464Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6639, node 1 2025-10-24T16:28:11.532325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:11.532337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:11.532339Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:28:11.532376Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16170 2025-10-24T16:28:11.577547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:11.577582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:16170 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:28:11.578665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:11.591150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:11.749605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:11.889979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825936127374289:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.890007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.890244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825936127374301:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.890254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825936127374302:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.890315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:11.891148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:11.894106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:28:11.894190Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825936127374305:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:11.985145Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825936127374356:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:12.029517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.056232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.081844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.100413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.113476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.156576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.175234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.201944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.222277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.240557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.262969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:12.285263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemes ... 
ult=not_found; 2025-10-24T16:28:19.265378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.265380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.266572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.266590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.266593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.266632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.266642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.266645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.270206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.270224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.270226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.272399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.272421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.272423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.273543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.273556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.273559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.274549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.274559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.274561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.275692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.275702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.275704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.276495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.276514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.276516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.277311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.277330Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.277332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.278101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.278116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.278118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.278784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.278801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.278804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.279480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.279501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.279503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.280365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.280383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.280386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.281195Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.281207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.281210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.281425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.281435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.281437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsWithLookupJoin1+RemoveLimitOperator [GOOD] Test command err: Trying to start YDB, gRPC: 24743, MsgBus: 22412 2025-10-24T16:28:10.421180Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825929154131001:2269];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:10.421244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001edf/r3tmp/tmpG66XQm/pdisk_1.dat 2025-10-24T16:28:10.519947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:10.519971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:10.521997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24743, node 1 2025-10-24T16:28:10.531446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:10.531935Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:10.532007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:10.532009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:10.532011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:28:10.532048Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:10.532168Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825929154130747:2081] 1761323290415274 != 1761323290415277 TClient is connected to server localhost:22412 TClient is connected to server localhost:22412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:10.605638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:10.618414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:10.809626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:10.863011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825929154131423:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.863039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.863067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825929154131435:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.863159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825929154131437:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.863174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.863950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.866448Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825929154131438:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:10.936766Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825929154131490:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:11.000683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.016953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.026414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.036791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.050499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.107845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.119358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.127621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.141390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.155525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.168972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:11.184008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schem ... 
74976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.132907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.134135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.134150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.134153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.134646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.134675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.134682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.135007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.135021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.135023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.136051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.136064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.136066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.136173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.136180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.136183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.137320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.137332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.137337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.137339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.137339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.137341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.138062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.138078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.138081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.138838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.138852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.138854Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.139481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.139494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.139496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.139602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.139622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.139624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.140589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.140603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.140605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.140784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.140793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.140795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.141667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.141675Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.141677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.141844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.141852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.141854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found;
:7:3: Warning: ORDER BY without LIMIT in subquery will be ignored, code: 4504
:7:3: Warning: ORDER BY without LIMIT in subquery will be ignored, code: 4504 >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::TestReadOnly-withSink >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx |61.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |61.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-10-24T16:28:15.383094Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:345} Bootstrap 2025-10-24T16:28:15.384325Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "/home/runner/.ya/build/build_root/fxx3/0021a1/r3tmp/tmpBXiq3b//pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2025-10-24T16:28:15.384418Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "/home/runner/.ya/build/build_root/fxx3/0021a1/r3tmp/tmpBXiq3b//pdisk0.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-10-24T16:28:15.384630Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:15.384918Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2025-10-24T16:28:15.384932Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:15.385066Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2025-10-24T16:28:15.385075Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:2:0] 
VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:15.385195Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2025-10-24T16:28:15.385202Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:15.385314Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2025-10-24T16:28:15.385324Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 1040187392 HasGroupInfo# true GroupInfoGeneration# 1 2025-10-24T16:28:15.385537Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 31 PipeClientId# [1:40:2077] ControllerId# 72057594037932033 2025-10-24T16:28:15.385544Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-10-24T16:28:15.385575Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:320} StartInvalidGroupProxy GroupId# 4294967295 2025-10-24T16:28:15.385601Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:332} StartRequestReportingThrottler 2025-10-24T16:28:15.396434Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-10-24T16:28:15.396498Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:345} Bootstrap 2025-10-24T16:28:15.397804Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2025-10-24T16:28:15.397856Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 1040187392 HasGroupInfo# true GroupInfoGeneration# 1 2025-10-24T16:28:15.398100Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 31 PipeClientId# [2:84:2062] ControllerId# 72057594037932033 2025-10-24T16:28:15.398106Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-10-24T16:28:15.398126Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:320} StartInvalidGroupProxy GroupId# 4294967295 2025-10-24T16:28:15.398146Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:332} StartRequestReportingThrottler 2025-10-24T16:28:15.399653Z node 2 :BS_NODE DEBUG: 
{NWDC00@distconf.cpp:28} Bootstrap 2025-10-24T16:28:15.405735Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-10-24T16:28:15.405883Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-10-24T16:28:15.405962Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:15.589514Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:15.590707Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:15.590729Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-10-24T16:28:15.592032Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-10-24T16:28:15.592143Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:477} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\024\236\304\020~\007\340\353\266\000\022\356\372\271\373\3676W\252\210" } 2025-10-24T16:28:15.592225Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:15.592236Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-10-24T16:28:15.592257Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-10-24T16:28:15.592275Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:477} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\252(\246\271G\037]\226\250\335g\235\214\226\340\241\247\207\366\376" } 2025-10-24T16:28:15.592299Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:15.592372Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-10-24T16:28:15.592382Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-10-24T16:28:15.592486Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:15.592571Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-10-24T16:28:15.593688Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-10-24T16:28:15.593803Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-10-24T16:28:15.593810Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-10-24T16:28:15.593845Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:477} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: 
"\024\236\304\020~\007\340\353\266\000\022\356\372\271\373\3676W\252\210" } 2025-10-24T16:28:15.593870Z node 1 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 1 2025-10-24T16:28:15.593881Z node 1 :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 2 Binding# {2.0/13105934115861455620@[0:0:0]} SessionId# [0:0:0] 2025-10-24T16:28:15.593965Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2025-10-24T16:28:15.594015Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-10-24T16:28:15.594116Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-10-24T16:28:15.594125Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} ... 
NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/fxx3/0021a1/r3tmp/tmptmIxmX/new_pdisk.dat" PDiskGuid: 921927111993799959 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN InferPDiskSlotCountFromUnitSize: 0 InferPDiskSlotCountMax: 0 } } InstanceId: "c6789b05-57543575-b4d8cf72-46d76354" AvailDomain: 31 } 2025-10-24T16:28:19.249723Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:857} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/fxx3/0021a1/r3tmp/tmptmIxmX/new_pdisk.dat" PDiskGuid: 921927111993799959 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN InferPDiskSlotCountFromUnitSize: 0 InferPDiskSlotCountMax: 0 } } InstanceId: "c6789b05-57543575-b4d8cf72-46d76354" AvailDomain: 31 } 2025-10-24T16:28:19.249746Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1000 Path: "/home/runner/.ya/build/build_root/fxx3/0021a1/r3tmp/tmptmIxmX/new_pdisk.dat" PDiskGuid: 921927111993799959 PDiskCategory: 0 PDiskConfig { ChunkSize: 33554432 } EntityStatus: CREATE ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN InferPDiskSlotCountFromUnitSize: 0 InferPDiskSlotCountMax: 0 } } 2025-10-24T16:28:19.249917Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 0 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-10-24T16:28:19.249942Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1000 Path# "/home/runner/.ya/build/build_root/fxx3/0021a1/r3tmp/tmptmIxmX/new_pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-10-24T16:28:19.250139Z node 2 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:839} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 2 InstanceId: "c6789b05-57543575-b4d8cf72-46d76354" Comprehensive: true AvailDomain: 31 UpdateSyncers: true } 2025-10-24T16:28:19.250172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-10-24T16:28:19.260763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-10-24T16:28:19.612094Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:839} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 921927111993799959 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 921927111993799959 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: 
"c6789b05-57543575-b4d8cf72-46d76354" AvailDomain: 31 } 2025-10-24T16:28:19.612151Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:857} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 921927111993799959 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 921927111993799959 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "c6789b05-57543575-b4d8cf72-46d76354" AvailDomain: 31 } 2025-10-24T16:28:19.612185Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {VDisks { VDiskID { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 921927111993799959 } VDiskKind: Default EntityStatus: CREATE StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 921927111993799959 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 3187671040 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-10-24T16:28:19.612371Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 0 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-10-24T16:28:19.612435Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1000 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-10-24T16:28:19.612463Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [be000000:1:0:0:0] VSlotId# 1:1000:1000 PDiskGuid# 921927111993799959 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:19.612689Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [be000000:1:0:0:0] VSlotId# 1:1000:1000 PDiskGuid# 921927111993799959 2025-10-24T16:28:19.613215Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639257 Sender# [1:368:2319] SessionId# [0:0:0] Cookie# 0 2025-10-24T16:28:20.812567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 921927111993799959 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-10-24T16:28:20.812825Z node 2 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:42:2079] SessionId# [2:123:2048] Cookie# 13105934115861455620 2025-10-24T16:28:20.812843Z node 2 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:123:2048] Inserted# false Subscription# {SessionId# [2:123:2048] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-10-24T16:28:20.812881Z node 2 :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} 
TEvNodeConfigPush NodeId# 1 Cookie# 13105934115861455620 SessionId# [2:123:2048] Binding# Record# {CacheUpdate { KeyValuePairs { Key: "Gbe000000" Generation: 1 Value: "\010\200\200\200\360\013\020\001\030\000\"\026\n\024\n\022\010\001\020\350\007\030\350\007 \227\352\304\231\256\372\325\345\0140\0008\000B\000J\000P\200\200\200\360\013X\000j\000p\000" } } } RootNodeId# 2 2025-10-24T16:28:20.812888Z node 2 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:123:2048] Inserted# false Subscription# {SessionId# [2:123:2048] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-10-24T16:28:20.813002Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-10-24T16:28:20.813470Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:86:2064] SessionId# [1:136:2048] Cookie# 13105934115861455620 2025-10-24T16:28:20.813485Z node 1 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [1:136:2048] Inserted# false Subscription# {SessionId# [1:136:2048] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-10-24T16:28:20.813517Z node 1 :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 13105934115861455620 SessionId# [1:136:2048] Binding# {2.2/13105934115861455620@[1:136:2048]} Record# {RootNodeId: 2 CacheUpdate { KeyValuePairs { Key: "Gbe000000" Generation: 1 Value: "\010\200\200\200\360\013\020\001\030\000\"\026\n\024\n\022\010\001\020\350\007\030\350\007 \227\352\304\231\256\372\325\345\0140\0008\000B\000J\000P\200\200\200\360\013X\000j\000p\000" } } } 2025-10-24T16:28:20.814464Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } } 2025-10-24T16:28:20.989791Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1224} Handle(TEvStatusUpdate) 2025-10-24T16:28:21.018549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 921927111993799959 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-10-24T16:28:21.018847Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1224} Handle(TEvStatusUpdate) 2025-10-24T16:28:21.018905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 3187671040 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1000 VSlotId: 1000 PDiskGuid: 921927111993799959 Status: READY OnlyPhantomsRemain: false } } Sending TEvPut 2025-10-24T16:28:21.019024Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:139} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2025-10-24T16:28:21.019033Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 3187671040 HasGroupInfo# true GroupInfoGeneration# 1 2025-10-24T16:28:21.019650Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 Sending TEvGet Sending TEvVGet Sending TEvPut 2025-10-24T16:28:21.110145Z node 2 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:139} HandleForwarded GroupId# 3187671040 
EnableProxyMock# false NoGroup# false 2025-10-24T16:28:21.110166Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 3187671040 HasGroupInfo# false GroupInfoGeneration# 2025-10-24T16:28:21.110173Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:280} RequestGroupConfig GroupId# 3187671040 2025-10-24T16:28:21.110253Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 3187671040 2025-10-24T16:28:21.110286Z node 2 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [2:22:2050] SessionId# [0:0:0] Cookie# 0 2025-10-24T16:28:21.110440Z node 2 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [2:22:2050] SessionId# [0:0:0] Cookie# 0 2025-10-24T16:28:21.111079Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 3187671040 2025-10-24T16:28:21.111133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:22:2050] Cookie# 0 Recipient# [1:404:2347] RecipientRewrite# [1:368:2319] Request# {NodeID: 2 GroupIDs: 3187671040 } StopGivingGroups# false 2025-10-24T16:28:21.111166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 3187671040 } Sending TEvGet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7772, MsgBus: 11136 2025-10-24T16:28:09.546504Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825925368251612:2200];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:09.546555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee5/r3tmp/tmpYDWBdf/pdisk_1.dat 2025-10-24T16:28:09.622486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:09.638279Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7772, node 1 2025-10-24T16:28:09.649566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:09.649597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:09.650598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:09.664847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:09.664863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:09.664865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:09.664915Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11136 TClient is 
connected to server localhost:11136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:09.738371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:09.808593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:10.024669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825929663219415:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.024709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.024852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825929663219427:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.024864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825929663219428:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.024942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.025814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.028288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:28:10.028425Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825929663219431:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:10.102643Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825929663219482:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.156317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.191947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.204992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.219922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.239965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.285060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.308892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.335692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.359835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.375796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.397043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.411078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:10.424736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsa ... 
ult=not_found; 2025-10-24T16:28:19.591280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.591282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.593241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.593258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.593260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.594262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.594272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.594275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.595293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.595303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.595305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.596345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.596360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.596362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.597595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.597612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.597615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.598706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.598715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.598718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.599777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.599785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.599788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.600829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.600843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.600846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.601840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.601849Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.601852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.602814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.602823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.602825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.603836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.605199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.605213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.605216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.606292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.606300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.606303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.607278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.607287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.607289Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.608104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.608114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.609479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.609489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; 2025-10-24T16:28:19.609492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=67;result=not_found; >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert |61.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |61.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |61.7%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> KqpLocksTricky::TestNoWrite >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> KqpTx::RollbackRoTx [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-10-24T16:28:16.093308Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:345} Bootstrap 2025-10-24T16:28:16.094489Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpA1J0xu//pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 
PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2025-10-24T16:28:16.094575Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 0 Path# "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpA1J0xu//pdisk0.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-10-24T16:28:16.094789Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:16.095089Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:0:0] VSlotId# 1:0:0 PDiskGuid# 1 2025-10-24T16:28:16.095105Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:16.095223Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:1:0] VSlotId# 1:0:1 PDiskGuid# 1 2025-10-24T16:28:16.095231Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:16.095340Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:2:0] VSlotId# 1:0:2 PDiskGuid# 1 2025-10-24T16:28:16.095348Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-10-24T16:28:16.095452Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:279} StartLocalVDiskActor done VDiskId# [3e000000:1:0:3:0] VSlotId# 1:0:3 PDiskGuid# 1 2025-10-24T16:28:16.095464Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 1040187392 HasGroupInfo# true GroupInfoGeneration# 1 2025-10-24T16:28:16.095691Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 31 PipeClientId# [1:40:2077] ControllerId# 72057594037932033 2025-10-24T16:28:16.095699Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-10-24T16:28:16.095729Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:320} StartInvalidGroupProxy GroupId# 4294967295 2025-10-24T16:28:16.095756Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:332} StartRequestReportingThrottler 2025-10-24T16:28:16.114571Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-10-24T16:28:16.114636Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:345} Bootstrap 2025-10-24T16:28:16.120207Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } 
VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2025-10-24T16:28:16.120274Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 1040187392 HasGroupInfo# true GroupInfoGeneration# 1 2025-10-24T16:28:16.120513Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 31 PipeClientId# [2:84:2062] ControllerId# 72057594037932033 2025-10-24T16:28:16.120520Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-10-24T16:28:16.120540Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:320} StartInvalidGroupProxy GroupId# 4294967295 2025-10-24T16:28:16.120564Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:332} StartRequestReportingThrottler 2025-10-24T16:28:16.122156Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:28} Bootstrap 2025-10-24T16:28:16.122451Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-10-24T16:28:16.122571Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-10-24T16:28:16.122635Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:16.302264Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:16.302394Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:16.302407Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-10-24T16:28:16.303758Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-10-24T16:28:16.303869Z node 2 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:477} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "\252(\246\271G\037]\226\250\335g\235\214\226\340\241\247\207\366\376" } 2025-10-24T16:28:16.304891Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-10-24T16:28:16.304982Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:16.304993Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-10-24T16:28:16.305030Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-10-24T16:28:16.305056Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:477} UpdateBound RefererNodeId# 0 NodeId# :0/0 Meta# {Fingerprint: "0}Q\275\355\212\340\222\355\337\246d`\231\320\003\354\007T!" 
} 2025-10-24T16:28:16.305136Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:16.305192Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-10-24T16:28:16.305251Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-10-24T16:28:16.305291Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-10-24T16:28:16.305324Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-10-24T16:28:16.305394Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 0 Path: "pdisk0.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } VDisks { VDiskID { GroupID: 1040187392 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } Groups { GroupID: 1040187392 GroupGeneration: 1 ErasureSpecies: 1 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 0 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 1 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 2 PDiskGuid: 1 } } FailDomains { VDiskLocations { NodeID: 1 PDiskID: 0 VDiskSlotID: 3 PDiskGuid: 1 } } } } AvailabilityDomains: 31 } 2025-10-24T16:28:16.305428Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-10-24T16:28:16.311738Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-10-24T16:28:16.311766Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-10-24T16:28:16.311811Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:477} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "0}Q\275\355\212\340\222\355\337\246d`\231\320\003\354\007T!" 
} 2025-10-24T16:28:16.311836Z node 1 :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 1 2025-10-24T16:28:16.311864Z node 1 :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 2 Binding# {2.0/14397406086827994751@[0:0:0]} SessionId# [0:0:0] 2025-10-24T16:28:16.311955Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:332} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 3 2025-10-24T16:28:16.311960Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-10- ... 3681298323524653273 Status: READY OnlyPhantomsRemain: false } } Sending TEvPut 2025-10-24T16:28:21.872992Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:139} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2025-10-24T16:28:21.873001Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 3187671040 HasGroupInfo# true GroupInfoGeneration# 1 2025-10-24T16:28:21.873259Z node 1 :BS_NODE DEBUG: {NW68@node_warden_group.cpp:87} ConfigureLocalProxy propose GroupId# 3187671040 MainKey# {Id# '/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt' Version# 1} 2025-10-24T16:28:21.873350Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-10-24T16:28:21.873488Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK11@propose_group_key.cpp:119} Handle TEvControllerProposeGroupKey Request# {NodeId: 1 GroupId: 3187671040 LifeCyclePhase: 1 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" MainKeyVersion: 1 GroupKeyNonce: 3187671040 } 2025-10-24T16:28:21.873508Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK07@propose_group_key.cpp:82} TTxProposeGroupKey Execute 2025-10-24T16:28:21.974768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK08@propose_group_key.cpp:96} TTxProposeGroupKey Complete 2025-10-24T16:28:21.974873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:403:2353] Cookie# 0 Recipient# [1:403:2353] RecipientRewrite# [1:403:2353] Request# {NodeID: 1 GroupIDs: 3187671040 } StopGivingGroups# false 2025-10-24T16:28:21.974893Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 3187671040 } 2025-10-24T16:28:21.974983Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:839} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2025-10-24T16:28:21.975021Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:857} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { 
NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2025-10-24T16:28:21.975043Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } 2025-10-24T16:28:21.975208Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 0 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-10-24T16:28:21.975258Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1000 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 Sending TEvGet Sending TEvVGet Sending TEvPut 2025-10-24T16:28:21.996521Z node 2 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:139} HandleForwarded GroupId# 3187671040 EnableProxyMock# false NoGroup# false 2025-10-24T16:28:21.996546Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 3187671040 HasGroupInfo# false GroupInfoGeneration# 2025-10-24T16:28:21.996554Z node 2 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:280} RequestGroupConfig GroupId# 3187671040 2025-10-24T16:28:21.996646Z node 2 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 3187671040 2025-10-24T16:28:21.996674Z node 2 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [2:22:2050] SessionId# [0:0:0] Cookie# 0 2025-10-24T16:28:21.996760Z node 2 :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639258 Sender# [2:22:2050] SessionId# [0:0:0] Cookie# 0 2025-10-24T16:28:21.996837Z node 2 :BS_NODE DEBUG: {NW68@node_warden_group.cpp:87} ConfigureLocalProxy propose GroupId# 3187671040 MainKey# {Id# '/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt' Version# 1} 2025-10-24T16:28:21.996952Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [2:22:2050] Cookie# 0 Recipient# [1:441:2382] RecipientRewrite# [1:403:2353] Request# {NodeID: 2 GroupIDs: 3187671040 } StopGivingGroups# false 2025-10-24T16:28:21.996983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 3187671040 } 2025-10-24T16:28:21.997035Z node 2 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 3187671040 2025-10-24T16:28:21.997118Z node 2 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:839} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 
PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2025-10-24T16:28:21.997139Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:857} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2025-10-24T16:28:21.997160Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } 2025-10-24T16:28:21.997791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK11@propose_group_key.cpp:119} Handle TEvControllerProposeGroupKey Request# {NodeId: 2 GroupId: 3187671040 LifeCyclePhase: 1 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\343\335\222\255\331\025\"c\320;\203\000\203d*4w\001\201\213f\031\"\326Y\341\305m*\371\366\002}z\303\315" MainKeyVersion: 1 GroupKeyNonce: 3187671040 } 2025-10-24T16:28:21.997811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK07@propose_group_key.cpp:82} TTxProposeGroupKey Execute 2025-10-24T16:28:21.997817Z node 1 :BS_CONTROLLER ERROR: {BSCTXPGK04@propose_group_key.cpp:47} Group LifeCyclePhase does not match ELCP_INITIAL GroupId.GetRawId()# 3187671040 LifeCyclePhase# 3 2025-10-24T16:28:21.997826Z node 1 :BS_CONTROLLER DEBUG: {BSCTXPGK08@propose_group_key.cpp:96} TTxProposeGroupKey Complete 2025-10-24T16:28:21.997849Z node 1 :BS_CONTROLLER ERROR: {BSCTXPGK10@propose_group_key.cpp:108} TTxProposeGroupKey error GroupId# 3187671040 Status# ERROR Request# {NodeId: 2 GroupId: 3187671040 LifeCyclePhase: 1 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\343\335\222\255\331\025\"c\320;\203\000\203d*4w\001\201\213f\031\"\326Y\341\305m*\371\366\002}z\303\315" MainKeyVersion: 1 GroupKeyNonce: 3187671040 } 2025-10-24T16:28:21.997882Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:403:2353] Cookie# 0 Recipient# [1:403:2353] RecipientRewrite# [1:403:2353] Request# {NodeID: 2 GroupIDs: 3187671040 } StopGivingGroups# false 2025-10-24T16:28:21.997888Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 2 GroupIDs: 3187671040 } 2025-10-24T16:28:21.997961Z node 2 
:BS_NODE DEBUG: {NW52@node_warden_impl.cpp:839} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2025-10-24T16:28:21.997974Z node 2 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:857} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 2 ServiceSet { Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } } 2025-10-24T16:28:21.997986Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 3187671040 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1000 VDiskSlotID: 1000 PDiskGuid: 3681298323524653273 } } } EncryptionMode: 1 LifeCyclePhase: 3 MainKeyId: "/home/runner/.ya/build/build_root/fxx3/002155/r3tmp/tmpB1bF9y//key.txt" EncryptedGroupKey: "\301\246.Pe\017\302W(\035\340\036\240\220\334!K\2601\002Q\343\370s\226\0239\262\273\221#\"x\313\236\236" GroupKeyNonce: 3187671040 MainKeyVersion: 1 StoragePoolName: "" DeviceType: ROT } } Sending TEvGet >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 10136, MsgBus: 8655 2025-10-24T16:28:19.162106Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825970685731767:2136];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:19.162279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:19.165334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002dca/r3tmp/tmpJwpCkc/pdisk_1.dat 2025-10-24T16:28:19.297987Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:19.307922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825970685731669:2081] 1761323299159395 != 
1761323299159398 2025-10-24T16:28:19.308705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:19.308734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:19.309134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:19.312607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10136, node 1 2025-10-24T16:28:19.352129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:19.352142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:19.352144Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:19.352192Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8655 TClient is connected to server localhost:8655 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:19.504978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:19.512853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:19.525167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:19.578826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:19.622576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:19.657171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:19.680122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:19.760665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825970685733311:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.760714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.764076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825970685733321:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.764105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.827480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.844124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.867355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.895327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.908626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.927319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.949449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.974858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:19.997330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825970685734181:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.997360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.997529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825970685734186:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.997544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825970685734187:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: ... 2025-10-24T16:28:21.904242Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:21.904266Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:21.904268Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:21.904307Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11921 TClient is connected to server localhost:11921 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:21.967600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:21.980780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:22.001699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:22.036761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:22.053026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:22.096417Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:22.236905Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825981989937975:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.236935Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.237026Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825981989937985:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.237034Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.248088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.256234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.273183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.285375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.300344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.314377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.327940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.342603Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:22.366240Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7564825981989938846:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.366268Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.366367Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825981989938851:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.366374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7564825981989938852:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.366439Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.367135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:22.370072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-10-24T16:28:22.370132Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7564825981989938855:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:22.452286Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564825981989938907:3549] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:22.798793Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=2&id=NjI3M2UxNjAtY2JhZjU2YzMtYzgwZWUyYS1iZGFjMzg2YQ==, ActorId: [2:7564825981989939203:2521], ActorState: ReadyState, TraceId: 01k8bgsfwb0mg35nzwdc5029z1, Create QueryResponse for error on request, msg: >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRows >> TS3WrapperTests::CopyPartUpload |61.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitPrepared >> TS3WrapperTests::CopyPartUpload [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-10-24T16:27:42.896235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:42.910917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:42.910944Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.911438Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:42.911485Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:42.911538Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:42.912599Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:42.922914Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:42.922967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:42.923133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:42.923142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:42.923149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:42.923223Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:42.923235Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:42.923246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:42.961742Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:42.964664Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:42.964733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:42.964751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:42.964756Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:42.964759Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:42.964764Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:42.964810Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:42.964816Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:42.964877Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:42.964899Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:42.964905Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:42.964911Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:42.964918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:42.964923Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:42.964928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:42.964932Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:42.964936Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:42.964944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:42.964948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:42.964953Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:42.965274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:42.965280Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:42.965289Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:42.965321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:42.965331Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:42.965340Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:42.965347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:42.965350Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:42.965353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:42.965357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:42.965415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:42.965419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:42.965422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:42.965424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:42.965432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:42.965434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:42.965437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:42.965439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:42.965443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:42.976102Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:42.976123Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:42.976129Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:42.976139Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:42.976153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:42.976244Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-10-24T16:27:42.976250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:42.976257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:42.976274Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:42.976277Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:42.976309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:42.976316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:42.976320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:42.976324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:42.979057Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:42.979085Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:42.979148Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:42.979154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:42.979163Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:42.979171Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:42.979177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:42.979185Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:42.979190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... 
8:2316]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-10-24T16:28:22.939479Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939482Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-10-24T16:28:22.939497Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-10-24T16:28:22.939501Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939505Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-10-24T16:28:22.939532Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-10-24T16:28:22.939536Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939540Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-10-24T16:28:22.939548Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-10-24T16:28:22.939553Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939557Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-10-24T16:28:22.939572Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-10-24T16:28:22.939575Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939579Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-10-24T16:28:22.939591Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-10-24T16:28:22.939595Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939599Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-10-24T16:28:22.939611Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], 
Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-10-24T16:28:22.939614Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939618Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-10-24T16:28:22.939629Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-10-24T16:28:22.939633Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939637Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-10-24T16:28:22.939645Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-10-24T16:28:22.939649Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939652Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-10-24T16:28:22.939663Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-10-24T16:28:22.939667Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939670Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-10-24T16:28:22.939684Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-10-24T16:28:22.939687Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939691Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-10-24T16:28:22.939706Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-10-24T16:28:22.939710Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939716Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-10-24T16:28:22.939728Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender 
[32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-10-24T16:28:22.939731Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939735Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-10-24T16:28:22.939746Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-10-24T16:28:22.939750Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939753Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-10-24T16:28:22.939765Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-10-24T16:28:22.939768Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939772Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-10-24T16:28:22.939780Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-10-24T16:28:22.939784Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939788Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-10-24T16:28:22.939803Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-10-24T16:28:22.939807Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939813Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-10-24T16:28:22.939822Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-10-24T16:28:22.939827Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.939831Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-10-24T16:28:22.940009Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 
269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-10-24T16:28:22.940018Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.940023Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-10-24T16:28:22.940043Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:807:2733], Recipient [32:348:2316]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-10-24T16:28:22.940047Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:22.940051Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 30 31 25 27 24 31 27 31 24 27 28 28 22 24 24 12 12 28 19 20 20 20 28 22 15 22 15 22 15 22 9 - actual 30 31 25 27 24 31 27 31 24 27 28 28 22 24 24 12 12 28 19 20 20 20 28 22 15 22 15 22 15 22 9 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |61.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-10-24T16:28:24.144709Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 58F4F887-1971-4D60-8CF0-467C16C02087, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:8158 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2D724DB2-436B-49B2-AAC3-EA2D79B14782 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-10-24T16:28:24.146463Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 58F4F887-1971-4D60-8CF0-467C16C02087, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-10-24T16:28:24.146598Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# C8FE3446-B86C-4833-98B5-7AAE55C85D34, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:8158 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C1A1C2F1-B9A9-4FB8-9270-C5D8E2B2A726 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-10-24T16:28:24.147267Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# C8FE3446-B86C-4833-98B5-7AAE55C85D34, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-10-24T16:28:24.147355Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 6E14E6E5-0E17-4563-AED3-1D9F55577DCE, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT 
/TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8158 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DF263809-28E3-4D6C-ACB7-B8D08C94ACBD amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-10-24T16:28:24.148048Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 6E14E6E5-0E17-4563-AED3-1D9F55577DCE, response# UploadPartCopyResult { } 2025-10-24T16:28:24.148132Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 3DB86A7D-6DD7-4D1C-A186-62444E022BD0, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8158 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CEBA206A-B3BE-453E-BCD5-9CBD407F2B20 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2025-10-24T16:28:24.148845Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 3DB86A7D-6DD7-4D1C-A186-62444E022BD0, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-10-24T16:28:24.148922Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 7CB8D0E3-FCE0-4510-9BB3-83F95A6089AC, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:8158 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AC2A7651-930E-4816-A2E0-85E4B3E56480 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-10-24T16:28:24.151332Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 7CB8D0E3-FCE0-4510-9BB3-83F95A6089AC, response# GetObjectResult { } >> TTxLocatorTest::TestImposibleSize >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> YdbIndexTable::MultiShardTableOneUniqIndex >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> TTxLocatorTest::TestImposibleSize [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-10-24T16:28:25.010701Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2146: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-10-24T16:28:25.010833Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 
HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-10-24T16:28:25.010992Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-10-24T16:28:25.011454Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.011588Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-10-24T16:28:25.014239Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.014278Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.014289Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.014305Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1466: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-10-24T16:28:25.014342Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.014369Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-10-24T16:28:25.014413Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1077: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-10-24T16:28:25.014583Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#281474976710656 2025-10-24T16:28:25.014632Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-10-24T16:28:25.015350Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-10-24T16:28:25.015452Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#123456 2025-10-24T16:28:25.015588Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.015601Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.015620Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-10-24T16:28:25.015625Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-10-24T16:28:25.015675Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#281474976587200 2025-10-24T16:28:25.015712Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-10-24T16:28:25.015716Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-10-24T16:28:25.015757Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2118] requested range size#246912 2025-10-24T16:28:25.015795Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.015804Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:54: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-10-24T16:28:25.015815Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-10-24T16:28:25.015819Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2118] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-10-24T16:28:25.020223Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2122] requested range size#281474976340288 2025-10-24T16:28:25.020301Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-10-24T16:28:25.020308Z node 1 :TX_ALLOCATOR ERROR: 
txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:88:2122] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> YdbIndexTable::OnlineBuild >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapNamedStatement ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 10945, MsgBus: 1228 2025-10-24T16:28:20.054014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:20.054523Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825972302540334:2261];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:20.054564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d1b/r3tmp/tmpHadVgz/pdisk_1.dat 2025-10-24T16:28:20.127317Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10945, node 1 2025-10-24T16:28:20.136589Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825972302540097:2081] 1761323300040247 != 1761323300040250 2025-10-24T16:28:20.159680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:20.159691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:20.159693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:20.159736Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:20.160614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:20.160638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:20.161231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1228 2025-10-24T16:28:20.227226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1228 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:20.298775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:20.302090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:20.312906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:20.346879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:28:20.388743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.429862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:20.691357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825972302541739:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.691404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.693172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825972302541749:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.693201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.739153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.749503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.761511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.774471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.790447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.802507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.816591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.830383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.856495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825972302542613:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.856520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825972302542618:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.856528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.856606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825972302542621:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:20.856619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, statu ... (empty maybe) 2025-10-24T16:28:23.724518Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10399 TClient is connected to server localhost:10399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:23.836141Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:23.840801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:23.847742Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:23.866899Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:23.892584Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:23.906235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:23.988020Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:24.208180Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825991513054485:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.208204Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.208362Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825991513054495:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.208370Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.218814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.232616Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.241781Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.259822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.272669Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.283688Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.296370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.312536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.343872Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7564825991513055359:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.343906Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.343948Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825991513055364:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.343958Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825991513055366:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.343982Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.345028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:24.350646Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:28:24.350781Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564825991513055368:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:24.441974Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564825991513055420:3556] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:24.675942Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:24.988199Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=3&id=NWZkZDU1ODQtZDY1ZWE1OGEtMjg4NmJmNWItZWI5MWY3NzE=, ActorId: [3:7564825991513055680:2512], ActorState: ExecuteState, TraceId: 01k8bgsj0g762aqbwzb1mrt7td, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpTx::CommitPrepared [GOOD] >> TBlobStorageProxySequenceTest::TestProtobufSizeWithMultiGet >> TxUsage::WriteToTopic_Demo_42_Table [GOOD] |61.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |61.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |61.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25363, MsgBus: 22747 2025-10-24T16:28:08.400935Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825922394802648:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.400959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee8/r3tmp/tmpKMn8qW/pdisk_1.dat 2025-10-24T16:28:08.459984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.460433Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.461275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825922394802624:2081] 1761323288400796 != 1761323288400799 TServer::EnableGrpc on GrpcPort 25363, node 1 2025-10-24T16:28:08.467981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.467992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.467994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.468027Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22747 2025-10-24T16:28:08.507453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-10-24T16:28:08.507487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.509510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.538700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.543247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.711719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.857529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922394803300:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.857534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922394803310:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.857550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.857600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922394803315:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.857616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.858250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.860157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825922394803314:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:08.951281Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825922394803367:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:09.007737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:09.048968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:09.049030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:09.049087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:09.049118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:09.049144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:09.049169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:09.049194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:09.049218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:09.049245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:09.049267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:09.049291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:09.049315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:09.049339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7564825926689770888:2339];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:09.049944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:28:09.049972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:28:09.049984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:28:09.049989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=Table ... 
ult=not_found; 2025-10-24T16:28:22.096466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.096468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.096642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.096651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.096653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.097915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.097933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.097936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.099071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.099091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.099093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.099837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.099872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.099877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.100175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.100186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.100189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.101024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.101042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.101044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.101425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.101440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.101448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.102227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.102252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.102255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.102737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.102757Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.102759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.105774Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.105790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.105792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.105891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.105908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.105911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; >> YdbIndexTable::MultiShardTableOneIndex >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRows [GOOD] >> TDSProxyDiscover::Block42SuccessLastBlobNotFullyWritten >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 19871, MsgBus: 27407 2025-10-24T16:28:21.381622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825978343343786:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:21.381871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d0e/r3tmp/tmpj9CEJ7/pdisk_1.dat 2025-10-24T16:28:21.444877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:21.457863Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:21.459909Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825978343343755:2081] 1761323301381379 != 1761323301381382 TServer::EnableGrpc on GrpcPort 19871, node 1 2025-10-24T16:28:21.472453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:21.472462Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:21.472463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:21.472495Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27407 2025-10-24T16:28:21.495260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:21.495287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:21.496124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:21.520164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:21.532745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:21.551570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:21.572111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:21.585697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:21.632430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:21.820608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825978343345410:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.820638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.820814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825978343345420:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.820821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.866685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.877456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.886323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.904795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.915046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.935547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.945141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.957722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.980574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825978343346282:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.980602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.980688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825978343346288:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.980774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825978343346287:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.980787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.981488Z node 1 :FLAT_TX_SCHEMESHARD ... VE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27032, node 3 2025-10-24T16:28:24.436976Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:24.436987Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:24.436989Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:24.437045Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28554 2025-10-24T16:28:24.496279Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:24.515603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:24.517535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-10-24T16:28:24.526045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.541217Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:24.571725Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:24.586924Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:24.832615Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825990178820290:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.832657Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.832926Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825990178820300:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.832938Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:24.857344Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.886335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.903054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.926852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.947287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.960025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.975962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:24.988982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.012682Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7564825994473788457:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.012728Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.012967Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825994473788462:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.013001Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825994473788463:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.013010Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.014250Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:25.020253Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564825994473788466:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:25.122850Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564825994473788518:3549] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:25.407949Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |61.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |61.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |61.7%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |61.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TDSProxyDiscover::Block42SuccessLastBlobNotFullyWritten [GOOD] >> TDSProxyLooksLikeLostTheBlob::TDSProxyLooksLikeLostTheBlobBlock42 >> TxUsage::WriteToTopic_Demo_42_Query >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] |61.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |61.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TxUsage::ReadRuleGeneration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18628, MsgBus: 64089 2025-10-24T16:28:08.321072Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825921722518731:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.321088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001eee/r3tmp/tmps2mLwK/pdisk_1.dat 2025-10-24T16:28:08.364877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 18628, node 1 2025-10-24T16:28:08.379410Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.379691Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825921722518707:2081] 1761323288320953 != 1761323288320956 2025-10-24T16:28:08.386018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.386041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.386043Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.386079Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64089 2025-10-24T16:28:08.425732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.425766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.428262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.452118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.659923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.763653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921722519388:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.763656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921722519399:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.763677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.764393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.764871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921722519403:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.764887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.766768Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825921722519402:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:08.855361Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825921722519455:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.897186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:08.927942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.928052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:08.928097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:08.928119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:08.928142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:08.928165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:08.928189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:08.928189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825921722519656:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.928202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825921722519656:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:08.928211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:08.928233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:08.928239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825921722519656:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:08.928283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:08.928310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:08.928331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:08.928354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825921722519657:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:08.928389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7564825921722519656:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLAS ... 
nternal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.326537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.327143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.327158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.327160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.327580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.327597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.327599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.328218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.328231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.328241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.328659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.328673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.328675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.329978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.330845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.330853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.330855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.330993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.331005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.331008Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.332880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.333359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.333373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.333376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.333698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.333709Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.333711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.334433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.334448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.334450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:23.367946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:23.367967Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query >> TBlobStorageProxySequenceTest::TestGivenBlock42IntersectingPutWhenNodataOkThenOk >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 9022, MsgBus: 19947 2025-10-24T16:28:21.183135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825976768287427:2188];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:21.183169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d19/r3tmp/tmp5P6zp6/pdisk_1.dat 2025-10-24T16:28:21.225726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 9022, node 1 2025-10-24T16:28:21.238991Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:21.239887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825976768287250:2081] 1761323301180043 != 1761323301180046 2025-10-24T16:28:21.254401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:21.254419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:21.254421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-10-24T16:28:21.254474Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19947 2025-10-24T16:28:21.285646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:21.285683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:21.286785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:21.320029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:21.330291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:21.354395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:21.379739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:21.391119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:21.450519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:21.624107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825976768288909:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.624140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.624310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825976768288919:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.624324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.672458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.683784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.707041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.718772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.732881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.746780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.761076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.775545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:21.797974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825976768289781:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.798009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.798018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825976768289786:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.798062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825976768289788:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.798071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.798890Z node 1 :FLAT_TX_SCHEMESHARD W ... rpcPort 26590, node 3 2025-10-24T16:28:25.092934Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:25.092949Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:25.092951Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:25.093007Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2691 TClient is connected to server localhost:2691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:25.156993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:25.166638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:25.178805Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:25.214399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:25.233128Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:25.299352Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:25.527957Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825994456027294:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.527997Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.528309Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825994456027304:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.528322Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.541898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.559715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.581641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.595356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.607356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.618779Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.647130Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.675015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.707029Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[3:7564825994456028167:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.707058Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.707150Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825994456028172:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.707165Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564825994456028173:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.707243Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.708279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:25.712537Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564825994456028176:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:25.805864Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564825994456028228:3557] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:26.063914Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:26.213125Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=3&id=YmMxZGU3OC0xNTYxOWUyOS1kYTk5ZTlhZi00Y2M1OWZiMw==, ActorId: [3:7564825998750995785:2512], ActorState: ExecuteState, TraceId: 01k8bgsk6h6nk4328v6rxasktw, Create QueryResponse for error on request, msg: >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table >> TBlobStorageProxySequenceTest::TestGivenBlock42IntersectingPutWhenNodataOkThenOk [GOOD] >> TDSProxyGetTest::TestBlock42GetIntervalsWipedError >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] >> TBlobStorageProxySequenceTest::TestGivenBlock42MultiPut2ItemsStatuses [GOOD] >> TDSProxyGetTest::TestBlock42GetSpecific >> TDSProxyGetTest::TestBlock42GetSpecific [GOOD] >> TDSProxyPatchTest::NaiveErrorOnGetItem_ErasureNone [GOOD] >> TDSProxyPatchTest::SecuredOk_ErasureMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 29359, MsgBus: 6154 2025-10-24T16:28:21.165823Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825979285849598:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:21.165982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d1a/r3tmp/tmpOudPdn/pdisk_1.dat 2025-10-24T16:28:21.212143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:21.222890Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:21.223099Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825979285849477:2081] 1761323301164635 != 1761323301164638 TServer::EnableGrpc on GrpcPort 29359, node 1 2025-10-24T16:28:21.233803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:21.233821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:21.233823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:21.233872Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:6154 2025-10-24T16:28:21.268473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:21.268508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:21.269481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:21.296479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:21.420325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:21.564939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825979285850156:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.564944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825979285850166:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.564964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.565043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825979285850171:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.565069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:21.565750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:21.567323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825979285850170:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:21.633952Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825979285850223:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:21.690542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:21.712178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:21.712227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:21.712267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:21.712289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:21.712304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:21.712323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:21.712342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:21.712359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:21.712380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:21.712395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:21.712414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:21.712428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:21.712444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825979285850388:2336];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:21.712578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825979285850385:2333];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:21.712601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825979285850385:2333];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:21.712655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825979285850385:2333];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:21.712681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825979285850385:2333];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_N ... 
=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.621442Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.621445Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.622588Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.622597Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.622600Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.623740Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.623752Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.623755Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.625081Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.625091Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.625093Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.626327Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.626351Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.626354Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.627519Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.627530Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.627532Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.628787Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.628796Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.628798Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.630150Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.630170Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.630173Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.631352Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.631362Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.631364Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.632592Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.632608Z node 2 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.632611Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.633275Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.633285Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.633288Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.633618Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.633627Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.633630Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.634982Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.635007Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.635010Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.636511Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.636531Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.636534Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.637666Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.637676Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.637678Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:25.881051Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=281474976710668;tx_id=281474976710668;commit_tx_id=281474976710668;commit_lock_id=281474976710667;fline=manager.cpp:77;broken_lock_id=281474976710665; 2025-10-24T16:28:25.888494Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037921;self_id=[2:7564825990673229995:2407];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037921;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710665;problem=finished; 2025-10-24T16:28:25.889251Z node 2 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=manager.cpp:144;event=abort;tx_id=281474976710665;problem=finished; >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx4 >> TBlobStorageProxySequenceTest::TestProtobufSizeWithMultiGet [GOOD] >> TDSProxyGetTest::TestMirror32GetBlobCrcCheck >> TDSProxyPatchTest::SecuredOk_ErasureMirror3dc [GOOD] >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3057, MsgBus: 63443 2025-10-24T16:28:08.345460Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825920666112962:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.345484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:08.350285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ef1/r3tmp/tmptHoRhI/pdisk_1.dat 2025-10-24T16:28:08.405275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825920666112937:2081] 1761323288345288 != 1761323288345291 2025-10-24T16:28:08.411043Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3057, node 1 2025-10-24T16:28:08.421050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.421068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.421070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-10-24T16:28:08.421123Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63443 2025-10-24T16:28:08.454964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.454988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.455927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:08.457235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:63443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:08.508084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:08.512199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.781953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920666113610:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.781978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.782142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920666113622:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.782150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825920666113623:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.782235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.782785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.784347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:28:08.784435Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825920666113626:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:08.868435Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825920666113677:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.922721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:08.959399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825920666113937:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.962425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.962513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:08.962593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:08.962624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:08.962651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:08.962676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:08.962705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:08.962734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:08.962754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:08.962774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:08.962797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:08.962826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:08.962847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825920666113919:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:08.964016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825920666113937:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:08.964060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825920666113937:2347];tablet_id=72075186224037900;process=TTxIn ... 
nternal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.549719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.549898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.549909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.549911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.551130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.551145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.551147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.551379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.551388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.551392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.552785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.552801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.552804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.552811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.552821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.552833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.554197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.554216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.554218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.554291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.554301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.554304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.555954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.555970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.555972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.555976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.555986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.555988Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.557302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.557324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.557327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.558520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.558541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.558544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.559920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.559939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.559942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.560279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.560302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.560305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.561660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.561682Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.561685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.572875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.572903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.572906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:23.382525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:23.382549Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc >> TDSProxyGetTest::TestMirror32GetBlobCrcCheck [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureNone >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors [GOOD] >> TDSProxyPatchTest::NaiveErrorOnPut_ErasureNone [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_2_1_VdiskErrors >> TDSProxyPutTest::TestBlock42PutStatusErrorWith_2_1_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block >> TKqpScheduler::QueriesWithFairShareOverlimit+AllowOverlimit [GOOD] >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::EmptyColumns [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 2991, MsgBus: 30204 2025-10-24T16:28:08.370540Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825921584113139:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.370653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f13/r3tmp/tmp09HDZ2/pdisk_1.dat 2025-10-24T16:28:08.434791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.449350Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2991, node 1 2025-10-24T16:28:08.459530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-10-24T16:28:08.459542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.459544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.459606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:08.477437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.477462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.480112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30204 TClient is connected to server localhost:30204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.526700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.531268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.708486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.807268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921584113689:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.807271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921584113701:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.807287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.807329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825921584113703:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.807333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.808014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.809750Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825921584113704:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:08.873733Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825921584113756:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.923817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:08.960776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.960835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:08.960883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:08.960911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:08.960935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:08.960960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:08.960986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:08.961011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:08.961036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:08.961059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:08.961086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:08.961112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:08.961137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825921584114011:2347];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:08.964289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825921584113980:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.964314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825921584113980:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:08.964350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825921584113980:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:08.964371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7564825921584113980:2339];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCle ... 
nternal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.103668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.104926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.106226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.106235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.106242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.107237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.107246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.107248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.108273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.108292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.108295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.109347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.109361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.109364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.110375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.110390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.110393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.111654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.111670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.111672Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.112338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.112366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.112368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.112739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.112757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.112762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.113641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.113656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.113659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.113941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.113958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.113964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.114748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.114763Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.114766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.115104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.115121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.115123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:23.388699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:23.388716Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked |61.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |61.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |61.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] >> KqpLocksTricky::TestNoWrite [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDSProxyPutTest::TestMirror3dcPutStatusOkWith_3_1_0_VdiskErrors [GOOD] Test command err: 2025-10-24T16:28:27.846066Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:654: [7e4afa7ea38a37be] bootstrap ActorId# [3:83:2129] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-10-24T16:28:27.846156Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:27.846163Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-10-24T16:28:27.846169Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-10-24T16:28:27.846173Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-10-24T16:28:27.846178Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# 
[72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-10-24T16:28:27.846182Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-10-24T16:28:27.849889Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-10-24T16:28:27.849953Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-10-24T16:28:27.849959Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-10-24T16:28:27.850011Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:1:0] Marker# BPP01 2025-10-24T16:28:27.850019Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-10-24T16:28:27.850023Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-10-24T16:28:27.850074Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:1:0] Marker# BPP01 2025-10-24T16:28:27.850128Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-10-24T16:28:27.850136Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-10-24T16:28:27.850140Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-10-24T16:28:27.850155Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:2:0] Marker# BPP01 2025-10-24T16:28:27.850174Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-10-24T16:28:27.850179Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:27.850182Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-10-24T16:28:27.850185Z node 3 :BS_PROXY_PUT 
DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 8 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-10-24T16:28:27.850188Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 8 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-10-24T16:28:27.850223Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:1:0:0] Marker# BPP01 2025-10-24T16:28:27.850230Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:2:2:0] Marker# BPP01 2025-10-24T16:28:27.850246Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-10-24T16:28:27.850252Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-10-24T16:28:27.850310Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.561 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.561 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.561 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.282 VDiskId# [0:1:0:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.331 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.365 VDiskId# [0:1:1:1:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.379 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.428 VDiskId# [0:1:2:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.482 VDiskId# [0:1:0:2:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.495 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.507 VDiskId# [0:1:1:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.527 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 4.543 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:1:0:0] NodeId# 3 } TEvVPut{ TimestampMs# 4.544 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:2:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.575 VDiskId# [0:1:1:0:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 4.585 VDiskId# [0:1:2:2:0] NodeId# 3 Status# OK } ] } >> KqpJoinOrder::TPCH9_100 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead |61.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |61.8%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |61.8%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TKqpScheduler::ZeroQueries [GOOD] >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore [FAIL] >> KqpAnalyze::AnalyzeTable-ColumnStore >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> TKqpScanFetcher::ScanDelayedRetry |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> TKqpScanFetcher::ScanDelayedRetry [GOOD] >> TKqpScheduler::AddUpdatePools [GOOD] >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> KqpSinkMvcc::OlapMultiSinks |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> TKqpScheduler::ZeroLimits [GOOD] >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] Test command err: Trying to start YDB, gRPC: 2250, MsgBus: 12083 2025-10-24T16:28:08.692453Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825922111929170:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.692474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee7/r3tmp/tmpB5WrbL/pdisk_1.dat 2025-10-24T16:28:08.744045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.745527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.745556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.748358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:08.756283Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.757334Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7564825922111929129:2081] 1761323288692121 != 1761323288692124 TServer::EnableGrpc on GrpcPort 2250, node 1 2025-10-24T16:28:08.773142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.773156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.773159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.773205Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12083 TClient is connected to server localhost:12083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.840244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.944015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:09.138012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825926406897107:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.138017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825926406897096:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.138037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.138098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825926406897111:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.138109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.138874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:09.140916Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825926406897110:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:09.201499Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825926406897163:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:09.259697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:09.292501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:09.292556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:09.292630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:09.292658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:09.292690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:09.292715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:09.292737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:09.292776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:09.292797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:09.292818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:09.292846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:09.292874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:09.292903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825926406897387:2337];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:09.293481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-10-24T16:28:09.293498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-10-24T16:28:09.293511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-10-24T16:28:09.293519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-10-24T16:28:09.293538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;de ... 
ult=not_found; 2025-10-24T16:28:25.024493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.024496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.025724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.025735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.025737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.026886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.026896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.026900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.028528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.028547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.028550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.029696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.029706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.029709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.030791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.030800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.030802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.031835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.031844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.031994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.033227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.033236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.033239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.034335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.034359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.034361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.035458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.035467Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.035470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.036584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.036606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.036609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.037716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.037727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.037730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.038820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.038830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.038833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.041059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.041079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.041082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.042739Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.042768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.042771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.060965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.060996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.060999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=535;kqp_scan_fetcher_actor.cpp:48 :META:Reads { ShardId: 1001001 KeyRanges { } } 2025-10-24T16:28:29.564484Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:217: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2025-10-24T16:28:29.564514Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:617: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2025-10-24T16:28:29.574731Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:217: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2025-10-24T16:28:29.574767Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:617: SelfId: [1:7:2054]. 
TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #2 (total 2) schedule after 0.250000s ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH9_100 [GOOD] Test command err: Trying to start YDB, gRPC: 18571, MsgBus: 6243 2025-10-24T16:28:09.663131Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825927504872226:2256];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:09.663163Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:09.671938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee2/r3tmp/tmpppPnyz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18571, node 1 2025-10-24T16:28:09.718553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:09.718566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:09.718567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:09.718605Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:09.721302Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825927504871998:2081] 1761323289660351 != 1761323289660354 TClient is connected to server localhost:6243 2025-10-24T16:28:09.725200Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:6243 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:28:09.772585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:09.772623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:09.773982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:09.783905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:09.790410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:10.124408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825931799839971:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.124444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.124569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825931799839983:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.124576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825931799839984:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.124580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.125487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.128439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:28:10.128541Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825931799839987:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:10.225739Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825931799840038:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.328606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:10.374114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:10.374172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:10.374226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:10.374254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:10.374280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:10.374308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:10.374347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:10.374382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:10.374418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:10.374453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:10.374486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:10.374521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:10.374556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825931799840268:2338];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:10.375946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825931799840278:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:10.375968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825931799840278:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:10.375997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825931799840278:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:10.376019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622403 ... 
ult=not_found; 2025-10-24T16:28:25.147797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.147798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.148878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.148893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.148896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.148921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.148931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.148934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.149688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.149704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.149706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.150189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.150208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.150210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.150379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.150391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.150394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.151089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.151106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.151110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.151439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.151457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.151460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.152084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.152105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.152108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.152761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.152779Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.152781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.153213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.153229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.153232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.153918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.153935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.153937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.154439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.154455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.154458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.155064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.155079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.155081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.155630Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.155648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.155650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.156170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.156185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.156189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; >> TDSProxyLooksLikeLostTheBlob::TDSProxyLooksLikeLostTheBlobBlock42 [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_Erasure4Plus2Block >> TKqpScheduler::WeightedPools [GOOD] >> TKqpScheduler::WeightedQueries [GOOD] >> TKqpScheduler::DemandIsCutOffByLimit [GOOD] >> TKqpScheduler::AddUpdateQueries [GOOD] >> TKqpScheduler::DeleteQueries [GOOD] >> TKqpScheduler::SingleDatabasePoolQueryStructure [GOOD] >> TKqpScheduler::WeightedDatabase [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 61136, MsgBus: 32167 2025-10-24T16:28:09.561205Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825923997697053:2259];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:09.561321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee3/r3tmp/tmpxZfZkZ/pdisk_1.dat 2025-10-24T16:28:09.623622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:09.632962Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61136, node 1 2025-10-24T16:28:09.648035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:09.648049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:09.648051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:09.648101Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:09.663827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:09.663873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:09.666479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32167 TClient is connected to server localhost:32167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:09.732795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:09.736969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:09.790585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:10.014307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928292664777:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.014353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.014501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928292664789:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.014512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928292664790:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.014517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.015568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.018418Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825928292664793:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:10.109215Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825928292664844:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.169788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:10.225671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:10.225723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:10.225777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:10.225797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:10.225818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:10.225838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:10.225859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:10.225878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:10.225899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:10.225920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:10.225940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:10.225959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:10.225978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825928292665067:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:10.230017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825928292665071:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:10.230065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825928292665071:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:10.230111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825928292665071:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:10.230133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825928292665071:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesC ... 
ult=not_found; 2025-10-24T16:28:25.195023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.195025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.195999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.196008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.196010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.197032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.197041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.197043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.198024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.198034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.198037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.198969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.198978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.198981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.201289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.201308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.201311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.202289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.202298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.202301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.203284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.203294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.203297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.204609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.204625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.204628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.205660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.205670Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.205673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.206651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.206661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.206663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.207687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.207696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.207698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.208255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.208271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.208274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.209324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.209335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.209341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.210406Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.210414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.210417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.211365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.211374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:25.211377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] >> TDSProxyPatchTest::SecuredErrorOnGetItem_Erasure4Plus2Block [GOOD] >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_1_VdiskErrors >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TDSProxyPutTest::TestBlock42PutStatusOkWith_1_1_VdiskErrors [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 31376, MsgBus: 4990 2025-10-24T16:28:22.152472Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825981676556016:2262];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:22.152504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d0a/r3tmp/tmpmW602X/pdisk_1.dat 2025-10-24T16:28:22.200159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:22.209582Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31376, node 1 2025-10-24T16:28:22.212353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825981676555762:2081] 1761323302149089 != 1761323302149092 
2025-10-24T16:28:22.222523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:22.222535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:22.222537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:22.222570Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4990 2025-10-24T16:28:22.260683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:22.260707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:22.264666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:22.311819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:22.316707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:22.444393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:22.706699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825981676556420:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.706722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825981676556431:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.706733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.706929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825981676556450:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.706940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.707681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:22.710506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:28:22.710600Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825981676556449:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:22.768937Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825981676556502:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:22.854115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:22.898225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:22.898290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:22.898358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:22.898377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:22.898395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:22.898419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:22.898440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:22.898461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:22.898485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:22.898506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:22.898526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:22.898546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:22.898568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7564825981676556666:2335];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:22.900519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825981676556667:2336];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:22.900541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825981676556667:2336];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:22.900597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7564825981676556667:2336];tablet_id=72075186224 ... :841: tablet_id=72075186224037969;self_id=[3:7564826005237359493:2455];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414475Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037995;self_id=[3:7564826005237359141:2421];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414485Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037970;self_id=[3:7564826005237359533:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414489Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037970;self_id=[3:7564826005237359533:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414490Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037996;self_id=[3:7564826005237359138:2420];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414495Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037996;self_id=[3:7564826005237359138:2420];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414501Z node 3 :TX_COLUMNSHARD_TX 
WARN: log.cpp:841: tablet_id=72075186224037971;self_id=[3:7564826005237359485:2451];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414506Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037971;self_id=[3:7564826005237359485:2451];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414511Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037985;self_id=[3:7564826005237359154:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414516Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037985;self_id=[3:7564826005237359154:2434];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414517Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037988;self_id=[3:7564826005237359285:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414522Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037988;self_id=[3:7564826005237359285:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414533Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037989;self_id=[3:7564826005237359150:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414533Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037986;self_id=[3:7564826005237359184:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414537Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037989;self_id=[3:7564826005237359150:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414538Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037986;self_id=[3:7564826005237359184:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414548Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037990;self_id=[3:7564826005237359153:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414552Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037990;self_id=[3:7564826005237359153:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414554Z node 
3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037987;self_id=[3:7564826005237359156:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414559Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037987;self_id=[3:7564826005237359156:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414565Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037991;self_id=[3:7564826005237359152:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414568Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037991;self_id=[3:7564826005237359152:2432];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414573Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037997;self_id=[3:7564826005237359143:2423];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414578Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037997;self_id=[3:7564826005237359143:2423];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414580Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037992;self_id=[3:7564826005237359151:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414584Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037992;self_id=[3:7564826005237359151:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414594Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037977;self_id=[3:7564826005237359492:2454];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414598Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037977;self_id=[3:7564826005237359492:2454];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414609Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037962;self_id=[3:7564826005237359476:2447];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037962;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414613Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037962;self_id=[3:7564826005237359476:2447];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037962;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 
2025-10-24T16:28:29.414625Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037963;self_id=[3:7564826005237359471:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414628Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037963;self_id=[3:7564826005237359471:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414639Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037980;self_id=[3:7564826005237359515:2460];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414643Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037980;self_id=[3:7564826005237359515:2460];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414654Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037993;self_id=[3:7564826005237359147:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414657Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037993;self_id=[3:7564826005237359147:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414668Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037978;self_id=[3:7564826005237359298:2441];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414672Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037978;self_id=[3:7564826005237359298:2441];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414682Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037979;self_id=[3:7564826005237359295:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=manager.cpp:126;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.414685Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037979;self_id=[3:7564826005237359295:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; 2025-10-24T16:28:29.415063Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=manager.cpp:144;event=abort;tx_id=281474976710666;problem=finished; >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> 
TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9 |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedQueries [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12392, MsgBus: 19564 2025-10-24T16:28:12.083015Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825940596586654:2132];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:12.083034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ed3/r3tmp/tmpVdDiMx/pdisk_1.dat 2025-10-24T16:28:12.163288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 12392, node 1 2025-10-24T16:28:12.174065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:12.174077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:12.174079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:12.174117Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:12.178318Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:12.180063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825940596586562:2081] 1761323292082374 != 1761323292082377 TClient is connected to server localhost:19564 2025-10-24T16:28:12.196277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:12.196303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:12.198215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19564 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:12.256831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:12.259401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:12.591092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825940596587236:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:12.591121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:12.591224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825940596587248:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:12.591238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825940596587249:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:12.591259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:12.592036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:12.594674Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825940596587252:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:12.649254Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825940596587303:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:12.709985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:12.771807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:12.771878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:12.771915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:12.771945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:12.771967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:12.771986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:12.772010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:12.772034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:12.772063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:12.772086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:12.772110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:12.772130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:12.772156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825940596587530:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:12.777770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825940596587524:2336];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:12.777798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825940596587524:2336];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:12.777839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825940596587524:2336];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:12.777859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7564825940596587524:2336];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:12.777887Z node 1 :TX_COLU ... 
715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.878855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.878864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.878867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.881206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.881226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.881228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.882397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.882408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.882410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.883551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.883561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.883564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.885000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.885017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.885019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.886139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.886149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.886152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.887302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.887313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.887315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.888209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.888225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.888227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.889479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.889489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.889492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.890717Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.890728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.890730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.892640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.892657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.892660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.893680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.893690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.893692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.894372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.894385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.894387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.895277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.895294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:26.895296Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found;
2025-10-24T16:28:26.895423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found;
2025-10-24T16:28:26.895432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found;
2025-10-24T16:28:26.895435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found;
2025-10-24T16:28:27.112208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs
2025-10-24T16:28:27.112229Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded
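The TX_COLUMNSHARD warnings above all share one structured shape: an ISO-8601 timestamp, a node id, a component, a severity, a source location, and a trailing run of semicolon-separated key=value fields. A minimal sketch for collapsing such runs into per-tablet counts is shown below; it is not part of YDB or ya, the helper names are invented for illustration, and the field names are simply those visible in the lines above.

import re
from collections import Counter

# Matches entries such as:
# 2025-10-24T16:28:26.878855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=...;method=...;result=not_found;
LOG_RE = re.compile(
    r"^(?P<ts>\S+Z) node (?P<node>\d+) :(?P<component>\w+) (?P<severity>\w+): "
    r"(?P<location>\S+): (?P<fields>.*)$"
)

def parse_fields(fields):
    # Split the trailing "key=value;" pairs into a dict; free-text tails yield an empty dict.
    return dict(pair.split("=", 1) for pair in fields.strip().strip(";").split(";") if "=" in pair)

def summarize(lines):
    # Count identical warnings regardless of timestamp to expose repeated noise.
    counts = Counter()
    for line in lines:
        m = LOG_RE.match(line.strip())
        if not m:
            continue
        kv = parse_fields(m.group("fields"))
        counts[(kv.get("tablet_id"), kv.get("method"), kv.get("result"))] += 1
    return counts

Feeding the warning block above through summarize() would collapse the repeated resolve_internal_path_id/not_found entries into a handful of per-tablet counts instead of dozens of near-identical lines.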
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::DeleteQueries [GOOD] |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedDatabase [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasureMirror3 [GOOD] Test command err: 2025-10-24T16:28:30.616729Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:654: [7e4afa7ea38a37be] bootstrap ActorId# [3:75:2121] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-10-24T16:28:30.616818Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616826Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616830Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616833Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616837Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616841Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616844Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616848Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616852Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616856Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616860Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 
situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616864Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616868Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616872Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616876Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616879Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616883Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616889Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.616897Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-10-24T16:28:30.616913Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:30.616920Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-10-24T16:28:30.616927Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-10-24T16:28:30.616930Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-10-24T16:28:30.616935Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-10-24T16:28:30.616939Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-10-24T16:28:30.616943Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-10-24T16:28:30.616947Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 
2025-10-24T16:28:30.616953Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-10-24T16:28:30.616956Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-10-24T16:28:30.616962Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-10-24T16:28:30.616965Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-10-24T16:28:30.625792Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-10-24T16:28:30.625871Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-10-24T16:28:30.625881Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:30.625886Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:30.625892Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:30.625896Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:30.625900Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:30.625904Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625908Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625912Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625916Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625920Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625924Z node 3 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625928Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625932Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625936Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625940Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625944Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625948Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.625956Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-10-24T16:28:30.625981Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:30.625989Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-10-24T16:28:30.626110Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:1:0] Marker# BPP01 2025-10-24T16:28:30.626132Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-10-24T16:28:30.626143Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-10-24T16:28:30.626153Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-10-24T16:28:30.626169Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-10-24T16:28:30.626225Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# 
[72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-10-24T16:28:30.626234Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-10-24T16:28:30.626239Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:30.626243Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:30.626248Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:30.626252Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:30.626256Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:30.626260Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2025-10-24T16:28:30.626264Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.626268Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.626273Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.626276Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.626280Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.626287Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:30.626291Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-10-24T16:28:30.626298Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:30.626303Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:1] 
Marker# BPG32 2025-10-24T16:28:30.626346Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:7:0] Marker# BPP01 2025-10-24T16:28:30.626381Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-10-24T16:28:30.626391Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:490: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-10-24T16:28:30.626464Z node 3 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.316 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.317 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.317 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.317 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.317 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.317 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 9.168 VDiskId# [0:1:0:0:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 9.339 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 9.428 VDiskId# [0:1:0:1:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 9.448 VDiskId# [0:1:0:2:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 9.459 VDiskId# [0:1:0:3:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 9.469 VDiskId# [0:1:0:4:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 9.485 VDiskId# [0:1:0:5:0] NodeId# 3 Status# OK } TEvVPutResult{ TimestampMs# 9.544 VDiskId# [0:1:0:6:0] NodeId# 3 Status# ERROR } TEvVPut{ TimestampMs# 9.624 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 9.663 VDiskId# [0:1:0:7:0] NodeId# 3 Status# OK } ] } |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut/unittest >> TDsProxyQuorumTracker::CheckFailModelErasure3Plus2Block [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] Test command err: 2025-10-24T16:28:28.373923Z node 6 :BS_PROXY_PUT INFO: dsproxy_put.cpp:654: [7e4afa7ea38a37be] bootstrap ActorId# [6:75:2121] Group# 0 BlobCount# 1 BlobIDs# [[72075186224047637:1:863:1:24576:786:0]] HandleClass# TabletLog Tactic# 
Default RestartCounter# 0 Marker# BPP13 2025-10-24T16:28:28.374013Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374021Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374025Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374030Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374034Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374038Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374042Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374046Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374050Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374055Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374058Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374062Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374066Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374070Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374074Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374078Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 
2025-10-24T16:28:28.374082Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374088Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.374096Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-10-24T16:28:28.374112Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:28.374119Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 0 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-10-24T16:28:28.374125Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-10-24T16:28:28.374129Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 1 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-10-24T16:28:28.374134Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG33 2025-10-24T16:28:28.374138Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 2 to# 2 blob Id# [72075186224047637:1:863:1:24576:786:3] Marker# BPG32 2025-10-24T16:28:28.374143Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG33 2025-10-24T16:28:28.374147Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 3 to# 3 blob Id# [72075186224047637:1:863:1:24576:786:4] Marker# BPG32 2025-10-24T16:28:28.374152Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG33 2025-10-24T16:28:28.374156Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 4 to# 4 blob Id# [72075186224047637:1:863:1:24576:786:5] Marker# BPG32 2025-10-24T16:28:28.374162Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG33 2025-10-24T16:28:28.374166Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 5 to# 5 blob Id# [72075186224047637:1:863:1:24576:786:6] Marker# BPG32 2025-10-24T16:28:28.381398Z node 6 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:0:0] Marker# BPP01 2025-10-24T16:28:28.381474Z node 6 
:BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-10-24T16:28:28.381484Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381489Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381495Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381500Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381504Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381508Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381512Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381516Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381520Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381523Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381527Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381531Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381535Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381539Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381543Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381546Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# 
[72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381550Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381558Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-10-24T16:28:28.381576Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG33 2025-10-24T16:28:28.381584Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 0 to# 6 blob Id# [72075186224047637:1:863:1:24576:786:1] Marker# BPG32 2025-10-24T16:28:28.381698Z node 6 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075 ... tore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-10-24T16:28:28.381710Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-10-24T16:28:28.381714Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381718Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381722Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381726Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381730Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381733Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381737Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381742Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381746Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381750Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] 
restore disk# 6 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381754Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381758Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381762Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381766Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381770Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381776Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381781Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 6 optimisticState# EBS_FULL Marker# BPG55 2025-10-24T16:28:28.381788Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:324: [7e4afa7ea38a37be] partPlacement record partSituation# ESituation::Unknown to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG33 2025-10-24T16:28:28.381792Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:345: [7e4afa7ea38a37be] Sending missing VPut part# 1 to# 7 blob Id# [72075186224047637:1:863:1:24576:786:2] Marker# BPG32 2025-10-24T16:28:28.381827Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:2:0] Marker# BPP01 2025-10-24T16:28:28.381841Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:4] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:3:0] Marker# BPP01 2025-10-24T16:28:28.381855Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:5] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:4:0] Marker# BPP01 2025-10-24T16:28:28.381869Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# OK ID# [72075186224047637:1:863:1:24576:786:6] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:5:0] Marker# BPP01 2025-10-24T16:28:28.381937Z node 6 :BS_PROXY_PUT INFO: dsproxy_put.cpp:264: [7e4afa7ea38a37be] received {EvVPutResult Status# ERROR ID# [72075186224047637:1:863:1:24576:786:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 }}} from# [0:1:0:6:0] Marker# BPP01 2025-10-24T16:28:28.381944Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 0 part# 0 error Marker# BPG50 2025-10-24T16:28:28.381948Z node 6 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 1 part# 1 error Marker# BPG50 2025-10-24T16:28:28.381952Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 2 part# 2 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:28.381956Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 3 part# 3 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:28.381960Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 4 part# 4 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:28.381964Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 5 part# 5 situation# ESituation::Present Marker# BPG51 2025-10-24T16:28:28.381968Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:30: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 6 part# 0 error Marker# BPG50 2025-10-24T16:28:28.381972Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381976Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 1 situation# ESituation::Sent Marker# BPG51 2025-10-24T16:28:28.381982Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381986Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 3 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381990Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 4 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381994Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [7e4afa7ea38a37be] Id# [72075186224047637:1:863:1:24576:786:0] restore disk# 7 part# 5 situation# ESituation::Unknown Marker# BPG51 2025-10-24T16:28:28.381998Z node 6 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [7e4afa7ea38a37be] restore Id# [72075186224047637:1:863:1:24576:786:0] optimisticReplicas# 5 optimisticState# EBS_DISINTEGRATED Marker# BPG55 2025-10-24T16:28:28.382029Z node 6 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [7e4afa7ea38a37be] Result# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUPUUU } { OrderNumber# 3 Situations# UUUPUU } { OrderNumber# 4 Situations# UUUUPU } { OrderNumber# 5 Situations# UUUUUP } { OrderNumber# 6 Situations# EUUUUU } { OrderNumber# 7 Situations# USUUUU } ] " ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-10-24T16:28:28.382041Z node 6 :BS_PROXY_PUT NOTICE: 
dsproxy_put.cpp:490: [7e4afa7ea38a37be] SendReply putResult# TEvPutResult {Id# [72075186224047637:1:863:1:24576:786:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 0 BlobId# [72075186224047637:1:863:1:24576:786:0] Reported ErrorReasons# [ ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUPUUU } { OrderNumber# 3 Situations# UUUPUU } { OrderNumber# 4 Situations# UUUUPU } { OrderNumber# 5 Situations# UUUUUP } { OrderNumber# 6 Situations# EUUUUU } { OrderNumber# 7 Situations# USUUUU } ] " ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-10-24T16:28:28.382101Z node 6 :BS_PROXY_PUT NOTICE: {BPP72@dsproxy_put.cpp:474} Query history GroupId# 0 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.739 sample PartId# [72075186224047637:1:863:1:24576:786:6] QueryCount# 1 VDiskId# [0:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.74 sample PartId# [72075186224047637:1:863:1:24576:786:5] QueryCount# 1 VDiskId# [0:1:0:4:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.74 sample PartId# [72075186224047637:1:863:1:24576:786:4] QueryCount# 1 VDiskId# [0:1:0:3:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.74 sample PartId# [72075186224047637:1:863:1:24576:786:3] QueryCount# 1 VDiskId# [0:1:0:2:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.74 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:1:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.74 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 6 } TEvVPutResult{ TimestampMs# 7.991 VDiskId# [0:1:0:0:0] NodeId# 6 Status# ERROR } TEvVPut{ TimestampMs# 8.15 sample PartId# [72075186224047637:1:863:1:24576:786:1] QueryCount# 1 VDiskId# [0:1:0:6:0] NodeId# 6 } TEvVPutResult{ TimestampMs# 8.234 VDiskId# [0:1:0:1:0] NodeId# 6 Status# ERROR } TEvVPut{ TimestampMs# 8.333 sample PartId# [72075186224047637:1:863:1:24576:786:2] QueryCount# 1 VDiskId# [0:1:0:7:0] NodeId# 6 } TEvVPutResult{ TimestampMs# 8.363 VDiskId# [0:1:0:2:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 8.376 VDiskId# [0:1:0:3:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 8.39 VDiskId# [0:1:0:4:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 8.404 VDiskId# [0:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 8.472 VDiskId# [0:1:0:6:0] NodeId# 6 Status# ERROR } ] } >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> GenericFederatedQuery::IcebergHiveSaSelectAll >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectAll >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 >> GenericFederatedQuery::IcebergHiveTokenSelectAll |61.9%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13538, MsgBus: 4162 2025-10-24T16:28:10.583168Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825928702608668:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:10.583190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:10.587754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001eda/r3tmp/tmptYFF6T/pdisk_1.dat 2025-10-24T16:28:10.624854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:10.624896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:10.625971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:10.638290Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825928702608639:2081] 1761323290582963 != 1761323290582966 TServer::EnableGrpc on GrpcPort 13538, node 1 2025-10-24T16:28:10.642304Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:10.648603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:10.648612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:10.648615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:10.648662Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4162 2025-10-24T16:28:10.684549Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:10.716553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:10.995489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928702609312:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.995513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.995768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928702609326:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.995776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.995814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825928702609320:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.996531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.998269Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825928702609328:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:11.089212Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825932997576675:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:11.153678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:11.187440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564825932997576928:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:11.190629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:11.190683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:11.190727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:11.190754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:11.190775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:11.190798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:11.190820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:11.190842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:11.190863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:11.190894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:11.190916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:11.190937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:11.190958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825932997576915:2338];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:11.191724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564825932997576928:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:11.191827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564825932997576928:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:11.191866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7564825932997576928:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 20 ... 
ult=not_found; 2025-10-24T16:28:27.490014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.490016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.491277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.491293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.491296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.492375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.492397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.492399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.493517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.493525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.493527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.494545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.494554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.494556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.495938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.495947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.495950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.497011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.497029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.497032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.498063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.498071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.498074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.499366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.499374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.499376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.500447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.500465Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.500470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.501489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.501497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.501499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.502424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.502432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.502434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.503421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.503430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.503432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.504810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.504832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.504834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.505802Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.505810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.505812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.506807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.506815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:27.506817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-10-24T16:27:48.465948Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:48.477102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:48.477120Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:48.477435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:48.477470Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:48.477507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:48.478227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:48.485619Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:48.485664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:48.485761Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:48.485768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:48.485772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:48.485817Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:48.485824Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:48.485831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:48.521358Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:48.524603Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:48.524675Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:48.524695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:48.524701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:48.524706Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:48.524712Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:48.524757Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:48.524762Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:48.524813Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:48.524831Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:48.524836Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:48.524841Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:48.524846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:48.524850Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:48.524855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:48.524859Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:48.524862Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:48.524869Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:48.524872Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:48.524876Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:48.525213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender 
[1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:48.525219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:48.525225Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:48.525247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:48.525254Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:48.525261Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:48.525265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:48.525268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:48.525272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:48.525274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:48.525318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:48.525321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:48.525323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:48.525326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:48.525332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:48.525335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:48.525338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:48.525340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:48.525344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:48.535931Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:48.535948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:48.535952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:48.535960Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose 
transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:48.535969Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:48.536035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:48.536040Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:48.536045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:48.536058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:48.536061Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:48.536082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:48.536086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:48.536089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:48.536092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:48.538104Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:48.538121Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:48.538161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:48.538165Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:48.538171Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:48.538176Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:48.538180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:48.538185Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:48.538189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... 
t [32:347:2315]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-10-24T16:28:31.079600Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079604Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-10-24T16:28:31.079614Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-10-24T16:28:31.079618Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079621Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-10-24T16:28:31.079630Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-10-24T16:28:31.079633Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079637Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-10-24T16:28:31.079648Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-10-24T16:28:31.079651Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079655Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-10-24T16:28:31.079665Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-10-24T16:28:31.079669Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079673Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-10-24T16:28:31.079697Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-10-24T16:28:31.079700Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079704Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-10-24T16:28:31.079715Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender 
[32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-10-24T16:28:31.079718Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079724Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-10-24T16:28:31.079733Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-10-24T16:28:31.079737Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079740Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-10-24T16:28:31.079751Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-10-24T16:28:31.079755Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079758Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-10-24T16:28:31.079768Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-10-24T16:28:31.079772Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079775Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-10-24T16:28:31.079786Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-10-24T16:28:31.079789Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079792Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-10-24T16:28:31.079801Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-10-24T16:28:31.079805Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079808Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-10-24T16:28:31.079819Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 
269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-10-24T16:28:31.079822Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079826Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-10-24T16:28:31.079836Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-10-24T16:28:31.079841Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079845Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-10-24T16:28:31.079870Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-10-24T16:28:31.079875Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079879Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-10-24T16:28:31.079890Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-10-24T16:28:31.079894Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079898Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-10-24T16:28:31.079910Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-10-24T16:28:31.079914Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079918Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-10-24T16:28:31.079930Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-10-24T16:28:31.079934Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079938Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-10-24T16:28:31.079949Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, 
received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-10-24T16:28:31.079953Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079957Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-10-24T16:28:31.079969Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2315]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-10-24T16:28:31.079973Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:31.079977Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 26 29 24 25 28 28 26 29 28 29 10 31 31 18 26 - 22 26 26 31 31 18 26 26 26 18 - 15 - - - - actual 26 29 24 25 28 28 26 29 28 29 10 31 31 18 26 - 22 26 26 31 31 18 26 26 26 18 - 15 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |61.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |61.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |61.9%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> KqpLocksTricky::TestSnapshotIfInsertRead [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot >> KqpSinkMvcc::OlapMultiSinks [GOOD] |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectAll >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange >> THealthCheckTest::StaticGroupIssue |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> 
GenericFederatedQuery::ClickHouseManagedSelectAll >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant >> THealthCheckTest::TestReBootingTabletIsDead >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> TxUsage::WriteToTopic_Demo_42_Query [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> GenericFederatedQuery::TestConnectorNotConfigured ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 20023, MsgBus: 4860 2025-10-24T16:28:21.822111Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825976496468982:2076];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:21.822128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:21.826218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d0b/r3tmp/tmp82Khoj/pdisk_1.dat 2025-10-24T16:28:21.852307Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:21.852467Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825976496468944:2081] 1761323301821785 != 1761323301821788 TServer::EnableGrpc on GrpcPort 20023, node 1 2025-10-24T16:28:21.865490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:21.865499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:21.865501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:21.865541Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4860 TClient is connected to server localhost:4860 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:28:21.923407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:21.927792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:21.927816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-10-24T16:28:21.928928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:21.938029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:22.228805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825980791436899:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.228828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825980791436925:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.228835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.228918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825980791436929:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.228930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:22.229532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:22.230931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825980791436928:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:22.320035Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825980791436981:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:22.377312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:22.399800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825980791437146:2333];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:22.405027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:22.405075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:22.405119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:22.405141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:22.405161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:22.405192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:22.405215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:22.405234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:22.405260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:22.405280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:22.405300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:22.405322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:22.405343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7564825980791437145:2332];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:22.406021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825980791437146:2333];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:22.406053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825980791437146:2333];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:22.406079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825980791437146:2333];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 20 ... 
;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.285273Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.285277Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.285405Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.285416Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.285419Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.286526Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.286536Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.286539Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.286704Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.286715Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.286719Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.288007Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.288017Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.288020Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.288246Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.288258Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.288260Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.289418Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.289440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.289443Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.290712Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.290723Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.290727Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.292090Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.292101Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.292104Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.293119Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.293130Z node 3 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.293133Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.293617Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.293629Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.293632Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.294020Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.294027Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.294029Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.294940Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.294951Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.294955Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.295359Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.295368Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.295371Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.296279Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.296301Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.296303Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.296429Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.296440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; 2025-10-24T16:28:32.296442Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=8;result=not_found; >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21575, MsgBus: 13298 2025-10-24T16:28:08.373480Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825922724165799:2081];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.373502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f0b/r3tmp/tmpPWxve9/pdisk_1.dat 2025-10-24T16:28:08.453478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.454502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.454524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.455699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:08.471463Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21575, node 1 2025-10-24T16:28:08.481068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.481081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.481083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.481132Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13298 TClient is connected to server localhost:13298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:08.540725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:08.543283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.662766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.827925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922724166423:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.827929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922724166434:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.827956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.828090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825922724166438:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.828109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:08.828814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:08.830868Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825922724166437:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:08.898320Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825922724166490:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:08.959359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:08.995456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825922724166718:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:08.999984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:09.000043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:09.000099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:09.000126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:09.000149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:09.000172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:09.000195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:09.000217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:09.000239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:09.000260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:09.000282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:09.000303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:09.000324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;self_id=[1:7564825922724166713:2337];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:09.001204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825922724166718:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:09.001366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825922724166718:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:09.001398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7564825922724166718:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesC ... 
nternal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.506563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.506819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.506837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.506840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.507665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.507684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.507686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.508157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.508176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.508180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.508758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.508775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.508777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.509394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.509413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.509416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.509835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.509853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.509856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.510622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.510639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.510642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.510868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.510891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.510898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.511931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.511950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.511952Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.512238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.512255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.512258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.513087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.513109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.513111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.513346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.513365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.513367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.514498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.514518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.514520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.515736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.515756Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.515759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.516539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.516560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:22.516563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:23.421459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:23.421479Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> TxUsage::WriteToTopic_Demo_43_Table >> TxUsage::WriteToTopic_Demo_24_Table |62.0%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::Issues100Groups100VCardListing >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> THealthCheckTest::DatabaseDoesNotExist >> KqpExplain::CompoundKeyRange [GOOD] >> THealthCheckTest::OneIssueListing >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2025-10-24T16:27:53.826710Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:53.838919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:53.838938Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:53.839309Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:53.839347Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:53.839386Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:53.840161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:53.848350Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:53.848389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:53.848523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:53.848535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:53.848542Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:53.848615Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:53.848624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:53.848633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:53.884968Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:53.888071Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:53.888127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:53.888145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:53.888149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:53.888153Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:53.888156Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:53.888195Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.888200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.888249Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:53.888265Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:53.888270Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:53.888275Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:53.888280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:53.888284Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:53.888290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:53.888293Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:53.888297Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:53.888304Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.888307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.888312Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:53.888714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:53.888736Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:53.888751Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:53.888788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:53.888799Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:53.888810Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:53.888817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:53.888821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:53.888827Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:53.888835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:53.888891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:53.888896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:53.888900Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:53.888905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:53.888914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:53.888918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:53.888922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:53.888926Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:53.888932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:53.899512Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:53.899531Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:53.899537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:53.899548Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:53.899560Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:53.899650Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.899658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.899664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:53.899681Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:53.899686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:53.899726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:53.899734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:53.899738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:53.899743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:53.901775Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:53.901794Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:53.901837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.901842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.901850Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:53.901856Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:53.901860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:53.901867Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:53.901871Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100000 ... 
2:349:2317]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-10-24T16:28:34.795210Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795215Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-10-24T16:28:34.795228Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-10-24T16:28:34.795232Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795237Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-10-24T16:28:34.795250Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-10-24T16:28:34.795254Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795260Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-10-24T16:28:34.795274Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-10-24T16:28:34.795278Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795282Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-10-24T16:28:34.795308Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-10-24T16:28:34.795312Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795317Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-10-24T16:28:34.795331Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-10-24T16:28:34.795335Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795339Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-10-24T16:28:34.795354Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], 
Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-10-24T16:28:34.795358Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795362Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-10-24T16:28:34.795373Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-10-24T16:28:34.795377Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795381Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-10-24T16:28:34.795394Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-10-24T16:28:34.795397Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795401Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-10-24T16:28:34.795414Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-10-24T16:28:34.795420Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795424Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-10-24T16:28:34.795436Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-10-24T16:28:34.795439Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795443Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-10-24T16:28:34.795455Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-10-24T16:28:34.795459Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795463Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-10-24T16:28:34.795496Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender 
[32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-10-24T16:28:34.795500Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795504Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-10-24T16:28:34.795513Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-10-24T16:28:34.795517Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795520Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-10-24T16:28:34.795532Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-10-24T16:28:34.795536Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795540Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-10-24T16:28:34.795553Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-10-24T16:28:34.795557Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795562Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-10-24T16:28:34.795578Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-10-24T16:28:34.795582Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795586Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-10-24T16:28:34.795600Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-10-24T16:28:34.795604Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795608Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-10-24T16:28:34.795622Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 
269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-10-24T16:28:34.795626Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795630Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-10-24T16:28:34.795644Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [32:241:2233], Recipient [32:349:2317]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-10-24T16:28:34.795648Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:34.795652Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 24 27 31 30 31 28 29 25 29 30 29 27 25 26 29 27 26 21 26 29 29 29 19 22 2 18 29 18 0 29 - - actual 24 27 31 30 31 28 29 25 29 30 29 27 25 26 29 27 26 21 26 29 29 29 19 22 2 18 29 18 0 29 - - interm 5 5 6 5 - 0 0 5 - 5 5 - 5 6 6 5 6 5 6 - 2 6 - 0 2 0 0 - 0 - - - >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant >> THealthCheckTest::DatabaseDoesNotExist [GOOD] >> THealthCheckTest::BridgeGroupNoIssues >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit95 >> TxUsage::ReadRuleGeneration [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] Test command err: Trying to start YDB, gRPC: 1233, MsgBus: 26249 2025-10-24T16:28:08.645151Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825923006150400:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:08.645171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee6/r3tmp/tmpwcnwdV/pdisk_1.dat 2025-10-24T16:28:08.706520Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:08.723396Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:08.723870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825923006150373:2081] 1761323288644822 != 1761323288644825 TServer::EnableGrpc on GrpcPort 1233, node 1 2025-10-24T16:28:08.734293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:08.734306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:08.734308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:08.734360Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26249 2025-10-24T16:28:08.760853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:08.760884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:08.761796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26249 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:08.812756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:08.815471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:08.827534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.846245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:08.866713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:08.880163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:08.880274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:09.064910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927301119317:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.064937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.065116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927301119327:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.065124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.118126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.130208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.139130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.154280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.168397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.183522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.196392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.212591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:09.226655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825927301120189:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.226689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.226708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927301120194:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.226714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825927301120196:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:09.226724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Da ... GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:29.813736Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgspqgcqpjdgr9b47bq2fd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:29.821803Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826010249696608:5367], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:29.822901Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgspqra6q6724wc1zvxnnb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:30.880684Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826014544664298:5480], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:30.881596Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsqrv8aenta20a9pbcfam, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:31.353729Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826018839631797:5539], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:31.354031Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsr7mfrabewh27sfgnjdf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:31.360542Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826018839631812:5546], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:31.360627Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsr7v9v760fc67pyss5z7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:31.377743Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826018839631825:5552], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:31.378828Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsr88ap936v1kd9f2hqn4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:32.331774Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826023134599510:5663], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:32.332475Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgss669t69re93rrhv6g6h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:32.338689Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826023134599525:5670], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:32.339904Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgss6dbd2wam446nkzrrab, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:32.777023Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826023134599730:5730], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:32.777686Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgssm4e4nf0dt7dkh8m79j, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:32.784050Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826023134599743:5736], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:32.784930Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgssma0vx2np765gse7rcm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:33.896997Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826027429567433:5849], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:33.898111Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgstpa57dm6zrhtgtxmpht, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:34.521278Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826031724534936:5909], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:34.522314Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsvabc1jag29thckvwtms, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:34.529283Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826031724534949:5915], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:34.530461Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsvava539d83r46pg1vk9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-10-24T16:28:34.553667Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7564826031724534962:5921], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-10-24T16:28:34.554804Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=2&id=N2IwNTJiODYtMTE3YTk3MDUtMTllMGM5ODQtMjkzMzFkNDI=, ActorId: [2:7564825932940275686:2521], ActorState: ExecuteState, TraceId: 01k8bgsvb34jdnxfcf6x0y3p0s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TNodeBrokerTest::Test1000NodesSubscribers >> THealthCheckTest::Issues100GroupsListing >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::TestTabletIsDead |62.0%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant >> TNodeBrokerTest::NodesMigrationRemovedChanged >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount >> TNodeBrokerTest::SingleDomainModeBannedIds >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] Test command err: Trying to start YDB, gRPC: 27777, MsgBus: 1129 2025-10-24T16:28:32.325016Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826022953580055:2142];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:32.325100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:32.328121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f46/r3tmp/tmpVQzbfX/pdisk_1.dat 2025-10-24T16:28:32.359236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:32.359276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:32.368399Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:32.371001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:32.371583Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826022953579950:2081] 1761323312321907 != 1761323312321910 TServer::EnableGrpc on GrpcPort 27777, node 1 2025-10-24T16:28:32.392490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:32.392503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:32.392506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:32.392552Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1129 2025-10-24T16:28:32.433900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:32.485775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:32.487000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:32.487366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:32.487655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:32.488357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323312532, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:32.488600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710657:0 2025-10-24T16:28:32.488609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-10-24T16:28:32.488647Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826022953580466:2247] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 3 2025-10-24T16:28:32.488723Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826022953579918:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.488761Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826022953579921:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.488772Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826022953579924:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.488827Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826022953580519:2267][/Root] Path was updated to new version: owner# [1:7564826022953580220:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.488925Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826022953580466:2247] Ack update: ack to# [1:7564826022953580314:2152], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:32.488953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify 
& remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2025-10-24T16:28:32.489323Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826022953580490:2287][/Root] Path was updated to new version: owner# [1:7564826022953580484:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.489368Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826022953580491:2288][/Root] Path was updated to new version: owner# [1:7564826022953580485:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.489566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23134, MsgBus: 24567 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f46/r3tmp/tmpIFksg0/pdisk_1.dat 2025-10-24T16:28:32.926206Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564826026441167872:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:32.926346Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:32.940981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:32.941010Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:32.943910Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:32.944522Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:32.944731Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564826026441167750:2081] 1761323312923454 != 1761323312923457 2025-10-24T16:28:32.948299Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23134, node 2 2025-10-24T16:28:32.960630Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:32.960646Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:32.960648Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed 
to initialize from file: (empty maybe) 2025-10-24T16:28:32.960698Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24567 TClient is connected to server localhost:24567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 Pat ... in>: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.124888Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.125122Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564826042614456108:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.125133Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.202976Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.227753Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.245107Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.263445Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.300497Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.315173Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.341314Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.356181Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.377547Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7564826042614456969:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.377577Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.377744Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564826042614456975:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.377754Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7564826042614456974:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.377760Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:36.378819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:36.383559Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-10-24T16:28:36.383722Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7564826042614456978:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:36.477554Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:7564826042614457030:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:36.600761Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:36.831826Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.939712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:28:37.011963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.092274Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.186859Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:28:37.261626Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-10-24T16:28:37.396477Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.414858Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-10-24T16:28:37.767161Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710705:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-10-24T16:28:37.781889Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [4:7564826046909425752:2804], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:17: Error: Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn't exist, please contact internal support 2025-10-24T16:28:37.783003Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=4&id=YmEzMjc3NjgtZmE3ZjZiZTYtMjZkZTNjNWYtNTMxODQ3NzQ=, ActorId: [4:7564826046909425750:2803], ActorState: ExecuteState, TraceId: 01k8bgsygg2akpt9hsth6pe5wm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot [GOOD] >> THealthCheckTest::BridgeGroupNoIssues [GOOD] >> THealthCheckTest::BridgeTwoGroups >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> THealthCheckTest::TestNodeDisconnected >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot [GOOD] Test command err: Trying to start YDB, gRPC: 11727, MsgBus: 8966 2025-10-24T16:28:23.649869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:23.733134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:23.733224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:23.733235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d06/r3tmp/tmpzauhei/pdisk_1.dat 2025-10-24T16:28:23.852819Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:23.853807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:23.853835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:23.853948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323302988290 != 1761323302988293 2025-10-24T16:28:23.886631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11727, node 1 2025-10-24T16:28:23.949246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:23.949265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:23.949270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:23.949368Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8966 TClient is connected to server localhost:8966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-10-24T16:28:24.044472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:24.048954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:24.137127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:24.324819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:24.435843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:24.582672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:24.779167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:25.084065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1706:3311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.084102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.084210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1779:3330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.084216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.089147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.257350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.486278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.686253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.914795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.121341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.432064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.665158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.969284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2591:3971], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.969335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.969400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2596:3976], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.969503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2597:3977], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have acce ... ptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:34.128484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:34.201375Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:34.406946Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:34.536382Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:34.697717Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:34.905202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:35.235814Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1701:3309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.235885Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.236062Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1774:3328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.236073Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.240834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.439750Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.692655Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.934953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.180074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.439094Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.752428Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:36.973720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.258759Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2587:3969], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:37.258809Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:37.258879Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2592:3974], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:37.258913Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2593:3975], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:37.258985Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:37.260284Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:37.425767Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2596:3978], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:37.480859Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:2657:4020] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:37.777080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.960463Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:38.218079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-10-24T16:27:52.957998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:52.969309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:52.969326Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:52.969649Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:52.969684Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:52.969740Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:52.970459Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:52.977929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:52.977963Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:52.978049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:52.978055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:52.978059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:52.978103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:52.978109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:52.978116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:53.013835Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:53.017419Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:53.017488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:53.017505Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:53.017509Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:53.017512Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:53.017516Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:53.017558Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.017563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.017616Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:53.017634Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:53.017640Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:53.017645Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:53.017650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:53.017653Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:53.017658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:53.017661Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:53.017664Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:53.017672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.017675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.017679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:53.018038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:53.018043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:53.018049Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:53.018072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:53.018079Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:53.018085Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:53.018090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:53.018092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:53.018095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:53.018098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:53.018145Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:53.018148Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:53.018150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:53.018153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:53.018158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:53.018161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:53.018163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:53.018165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:53.018169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:53.028923Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:53.028947Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:53.028953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:53.029004Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:53.029020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-10-24T16:27:53.029122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.029130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:53.029137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:53.029155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:53.029159Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:53.029193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:53.029200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:53.029204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:53.029208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:53.032224Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:53.032250Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:53.032311Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.032317Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:53.032325Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:53.032331Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:53.032336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:53.032343Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:53.032347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100 ... 
datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 399} 2025-10-24T16:28:36.563836Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-10-24T16:28:36.563940Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437185 2025-10-24T16:28:36.563948Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-10-24T16:28:36.563954Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000004:403] at 9437185 on unit StoreAndSendOutRS 2025-10-24T16:28:36.563962Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 400 at 9437185 from 9437185 to 9437186 txId 403 2025-10-24T16:28:36.563973Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-10-24T16:28:36.563981Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2025-10-24T16:28:36.563990Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:105:2138], exec latency: 0 ms, propose latency: 2 ms 2025-10-24T16:28:36.563998Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-10-24T16:28:36.564002Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-10-24T16:28:36.564144Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [4:351:2318], Recipient [4:240:2232]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 399} 2025-10-24T16:28:36.564152Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:36.564159Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 402 2025-10-24T16:28:36.564205Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [4:351:2318], Recipient [4:240:2232]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-10-24T16:28:36.564209Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:36.564214Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 2025-10-24T16:28:36.564227Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [4:351:2318], Recipient [4:462:2404]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-10-24T16:28:36.564232Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-10-24T16:28:36.564237Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437185 dest 9437186 producer 9437185 txId 403 2025-10-24T16:28:36.564256Z node 4 
:TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-10-24T16:28:36.564268Z node 4 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000004:403] from=9437185 to=9437186origin=9437185 2025-10-24T16:28:36.564284Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-10-24T16:28:36.564366Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [4:462:2404], Recipient [4:462:2404]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:36.564373Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:36.564380Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-10-24T16:28:36.564388Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:28:36.564395Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000004:403] at 9437186 for LoadAndWaitInRS 2025-10-24T16:28:36.564401Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000004:403] at 9437186 on unit LoadAndWaitInRS 2025-10-24T16:28:36.564414Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000004:403] at 9437186 is Executed 2025-10-24T16:28:36.564420Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000004:403] at 9437186 executing on unit LoadAndWaitInRS 2025-10-24T16:28:36.564424Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000004:403] at 9437186 to execution unit BlockFailPoint 2025-10-24T16:28:36.564429Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000004:403] at 9437186 on unit BlockFailPoint 2025-10-24T16:28:36.564434Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000004:403] at 9437186 is Executed 2025-10-24T16:28:36.564438Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000004:403] at 9437186 executing on unit BlockFailPoint 2025-10-24T16:28:36.564442Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000004:403] at 9437186 to execution unit ExecuteDataTx 2025-10-24T16:28:36.564446Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000004:403] at 9437186 on unit ExecuteDataTx 2025-10-24T16:28:36.564939Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:305: Executed operation [1000004:403] at tablet 9437186 with status COMPLETE 2025-10-24T16:28:36.564955Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Datashard execution counters for [1000004:403] at 9437186: {NSelectRow: 1, NSelectRange: 2, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 45, SelectRangeBytes: 360, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-10-24T16:28:36.564973Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000004:403] at 9437186 is ExecutedNoMoreRestarts 2025-10-24T16:28:36.564978Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000004:403] at 9437186 executing on unit ExecuteDataTx 
2025-10-24T16:28:36.564983Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000004:403] at 9437186 to execution unit CompleteOperation 2025-10-24T16:28:36.564989Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000004:403] at 9437186 on unit CompleteOperation 2025-10-24T16:28:36.565091Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000004:403] at 9437186 is DelayComplete 2025-10-24T16:28:36.565096Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000004:403] at 9437186 executing on unit CompleteOperation 2025-10-24T16:28:36.565101Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000004:403] at 9437186 to execution unit CompletedOperations 2025-10-24T16:28:36.565105Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [1000004:403] at 9437186 on unit CompletedOperations 2025-10-24T16:28:36.565113Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [1000004:403] at 9437186 is Executed 2025-10-24T16:28:36.565117Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000004:403] at 9437186 executing on unit CompletedOperations 2025-10-24T16:28:36.565122Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [1000004:403] at 9437186 has finished 2025-10-24T16:28:36.565126Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:36.565131Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-10-24T16:28:36.565136Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-10-24T16:28:36.565141Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-10-24T16:28:36.581968Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:28:36.581994Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-10-24T16:28:36.582017Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:105:2138], exec latency: 3 ms, propose latency: 5 ms 2025-10-24T16:28:36.582036Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-10-24T16:28:36.582044Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:28:36.582178Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [4:240:2232], Recipient [4:462:2404]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-10-24T16:28:36.582188Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:36.582195Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2025-10-24T16:28:36.594687Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437186 2025-10-24T16:28:36.594714Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2025-10-24T16:28:36.594736Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:105:2138], exec latency: 3 ms, propose latency: 5 ms 2025-10-24T16:28:36.594751Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-10-24T16:28:36.594758Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-10-24T16:28:36.594914Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [4:462:2404], Recipient [4:351:2318]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-10-24T16:28:36.594923Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:36.594929Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> THealthCheckTest::TestNodeDisconnected [GOOD] >> THealthCheckTest::TestStateStorageOk >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestStoppedTabletIsNotDead >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-10-24T16:28:38.680114Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.685535Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.687242Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.688284Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.710317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:38.710350Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-10-24T16:28:38.716588Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:38.717070Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:38.717175Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-10-24T16:28:38.717376Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:38.718206Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-10-24T16:28:38.718611Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-10-24T16:28:38.718684Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-10-24T16:28:38.718701Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:38.718707Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-10-24T16:28:38.718724Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-10-24T16:28:38.718763Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-10-24T16:28:38.718770Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-10-24T16:28:38.718774Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-10-24T16:28:38.718780Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:38.718797Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-10-24T16:28:38.718803Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-10-24T16:28:38.761728Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-10-24T16:28:38.761781Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-10-24T16:28:38.761795Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-10-24T16:28:38.761806Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-10-24T16:28:38.761915Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:197:2197], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:38.761938Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:197:2197] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } ... 
waiting for nameservers are connected (done) 2025-10-24T16:28:38.762601Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-10-24T16:28:38.762619Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:38.762632Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:38.762687Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:206:2204], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:38.762718Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039946, Sender [1:204:2202], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigRequest { Config { BannedNodeIds { From: 1025 To: 1032 } } } 2025-10-24T16:28:38.762724Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:256: StateWork, processing event TEvNodeBroker::TEvSetConfigRequest 2025-10-24T16:28:38.762745Z node 1 :NODE_BROKER DEBUG: node_broker__update_config.cpp:53: TTxUpdateConfig Execute Config { BannedNodeIds { From: 1025 To: 1032 } } 2025-10-24T16:28:38.762772Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1293: [DB] Update config in database config=BannedNodeIds { From: 1025 To: 1032 } 2025-10-24T16:28:38.773661Z node 1 :NODE_BROKER DEBUG: node_broker__update_config.cpp:85: TTxUpdateConfig Complete 2025-10-24T16:28:38.773723Z node 1 :NODE_BROKER TRACE: node_broker__update_config.cpp:92: TTxUpdateConfig reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvSetConfigResponse { Status { Code: OK } } 2025-10-24T16:28:38.773831Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:210:2208], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:38.773849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:204:2202], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:28:38.773856Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:38.773869Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:38.773928Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:212:2210], Recipient [1:172:2179]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:38.773964Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:204:2202], Recipient [1:172:2179]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-10-24T16:28:38.773973Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-10-24T16:28:38.773985Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: 
"1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-10-24T16:28:38.774048Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:28:38.774075Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046678944 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:28:38.808923Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2380: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-10-24T16:28:38.817519Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2647: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-10-24T16:28:38.817654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2522: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { ... 1:34:2065] ServerId: [1:197:2197] } 2025-10-24T16:28:39.721446Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:39.722434Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:39.722492Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:39.722905Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-10-24T16:28:39.722960Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-10-24T16:28:39.723020Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-10-24T16:28:39.723111Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:965: [DB] Loaded config: BannedNodeIds { From: 1024 To: 1029 } BannedNodeIds { From: 1031 To: 1032 } 2025-10-24T16:28:39.723122Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:995: [DB] Loaded current epoch: #3.8 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-10-24T16:28:39.723128Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1024: [DB] Loaded approximate epoch start: #3.8 2025-10-24T16:28:39.723132Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1041: [DB] Loaded main nodes table: Nodes 2025-10-24T16:28:39.723160Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:291: [Dirty] Added expired node #1024.v0 host1:1001 2025-10-24T16:28:39.723185Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1120: [DB] Loaded node #1024.v0 { NodeId: 1024, State: Expired, Version: 0, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-10-24T16:28:39.723210Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1030.v0 host3:1001 2025-10-24T16:28:39.723218Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1120: [DB] Loaded node #1030.v0 { NodeId: 1030, State: Active, Version: 0, Host: host3, Port: 1001, ResolveHost: host3.yandex.net, Address: 1.2.3.6, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=6/, AuthorizedByCertificate: 0, SlotIndex: 2, ServicedSubDomain: 72057594046678944:1 } 2025-10-24T16:28:39.723225Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:281: [Dirty] Added node #1033.v0 host2:1001 2025-10-24T16:28:39.723231Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1120: [DB] Loaded node #1033.v0 { NodeId: 1033, State: Active, Version: 0, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.5, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=5/, AuthorizedByCertificate: 0, SlotIndex: 1, 
ServicedSubDomain: 72057594046678944:1 } 2025-10-24T16:28:39.723252Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1197: [DB] Loaded nodeV2 #1024.v8 { NodeId: 1024, State: Expired, Version: 8, Host: host1, Port: 1001, ResolveHost: host1.yandex.net, Address: 1.2.3.4, Lease: 1, Expire: Thu, 01 Jan 1970 02:00:00 UTC, Location: DC=1/M=2/R=3/U=4/, AuthorizedByCertificate: 0, SlotIndex: 0, ServicedSubDomain: 72057594046678944:1 } 2025-10-24T16:28:39.723258Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1246: [DB] Node #1024.v8 is already migrated 2025-10-24T16:28:39.723268Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1197: [DB] Loaded nodeV2 #1030.v6 { NodeId: 1030, State: Active, Version: 6, Host: host3, Port: 1001, ResolveHost: host3.yandex.net, Address: 1.2.3.6, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=6/, AuthorizedByCertificate: 0, SlotIndex: 2, ServicedSubDomain: 72057594046678944:1 } 2025-10-24T16:28:39.723272Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1246: [DB] Node #1030.v6 is already migrated 2025-10-24T16:28:39.723282Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1197: [DB] Loaded nodeV2 #1033.v7 { NodeId: 1033, State: Active, Version: 7, Host: host2, Port: 1001, ResolveHost: host2.yandex.net, Address: 1.2.3.5, Lease: 2, Expire: Thu, 01 Jan 1970 03:00:00 UTC, Location: DC=1/M=2/R=3/U=5/, AuthorizedByCertificate: 0, SlotIndex: 1, ServicedSubDomain: 72057594046678944:1 } 2025-10-24T16:28:39.723286Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1246: [DB] Node #1033.v7 is already migrated 2025-10-24T16:28:39.723300Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-10-24T16:28:39.723315Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-10-24T16:28:39.723320Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-10-24T16:28:39.723324Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-10-24T16:28:39.723328Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-10-24T16:28:39.723348Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T03:00:00.025000Z 2025-10-24T16:28:39.723355Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #3.8 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z, approximate epoch start #3.8 nodes=2 expired=1 2025-10-24T16:28:39.723373Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##3.8 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z nodes=2 expired=1 removed=0 2025-10-24T16:28:39.723378Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1030.v6 to update nodes log 2025-10-24T16:28:39.723386Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1033.v7 to update nodes log 2025-10-24T16:28:39.723393Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:659: Add node #1024.v8 to update nodes log 2025-10-24T16:28:39.737170Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:365:2323] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:39.737253Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:365:2323], Recipient [1:331:2296]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:39.737285Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:366:2324], Recipient [1:331:2296]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:39.737333Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:331:2296]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-10-24T16:28:39.737342Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:39.737350Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:375: Delaying list nodes request for epoch #4 2025-10-24T16:28:39.737399Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039938, Sender [1:204:2202], Recipient [1:331:2296]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" } 2025-10-24T16:28:39.737406Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:251: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-10-24T16:28:39.737418Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1494: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1" 2025-10-24T16:28:39.737490Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-10-24T16:28:39.737528Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:314:2291] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-10-24T16:28:39.737595Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [1:368:2325], recipient# [1:367:2296], result# { ErrorCount: 0 DatabaseName: dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-10-24T16:28:39.737613Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1563: 
Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-10-24T16:28:39.737629Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1589: Finished resolving tenant: request# Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-10-24T16:28:39.737645Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435073, Sender [1:367:2296], Recipient [1:331:2296]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-10-24T16:28:39.737651Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:260: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-10-24T16:28:39.737678Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:78: TTxRegisterNode Execute 2025-10-24T16:28:39.737684Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:82: Registration request from host4:1001 (not fixed) tenant: dc-1 2025-10-24T16:28:39.737696Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-10-24T16:28:39.737720Z node 1 :NODE_BROKER DEBUG: node_broker__register_node.cpp:197: TTxRegisterNode Complete 2025-10-24T16:28:39.737736Z node 1 :NODE_BROKER TRACE: node_broker__register_node.cpp:59: TTxRegisterNode reply with: Status { Code: ERROR_TEMP Reason: "No free node IDs" } |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> THealthCheckTest::BridgeTwoGroups [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> THealthCheckTest::TestStoppedTabletIsNotDead [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots >> 
TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> TReplicationWithRebootsTests::AlterReplicationConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 17162, MsgBus: 2826 2025-10-24T16:28:19.441073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:19.441260Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825971083656157:2239];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:19.441546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d28/r3tmp/tmpeQ4mSC/pdisk_1.dat 2025-10-24T16:28:19.492342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:19.492369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:19.496240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:19.506750Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:19.507913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825971083655933:2081] 1761323299423654 != 1761323299423657 TServer::EnableGrpc on GrpcPort 17162, node 1 2025-10-24T16:28:19.520277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:19.520296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:19.520299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:19.520350Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2826 2025-10-24T16:28:19.552748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2826 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:19.617189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:19.621012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:19.915028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825971083656606:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.915070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.915308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825971083656618:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.915339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825971083656619:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.915348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:19.916427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:19.920111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-10-24T16:28:19.920261Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825971083656622:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-10-24T16:28:19.989018Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825971083656673:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:20.145768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.186119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.423175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:20.443949Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:24.440909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7564825971083656157:2239];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:24.440944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:28:34.469860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:34.469878Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:35.962544Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1547: SelfId: [1:7564826039803142223:2954], TxId: 281474976715680, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgswqm804ee78ky8zq9c8w. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=N2UyZDFjOWEtY2Q0MjI4OGQtYmJmMTc5NjgtMjU5YTRlODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1761323300772/18446744073709551615 shard 72075186224037889 with lowWatermark v1761323300821/18446744073709551615 (node# 1 state# Ready) } } 2025-10-24T16:28:35.962744Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [1:7564826039803142223:2954], TxId: 281474976715680, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgswqm804ee78ky8zq9c8w. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=N2UyZDFjOWEtY2Q0MjI4OGQtYmJmMTc5NjgtMjU5YTRlODU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1761323300772/18446744073709551615 shard 72075186224037889 with lowWatermark v1761323300821/18446744073709551615 (node# 1 state# Ready) } }. 2025-10-24T16:28:35.962961Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=N2UyZDFjOWEtY2Q0MjI4OGQtYmJmMTc5NjgtMjU5YTRlODU=, ActorId: [1:7564825975378631653:2954], ActorState: ExecuteState, TraceId: 01k8bgswqm804ee78ky8zq9c8w, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 32279, MsgBus: 17669 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d28/r3tmp/tmpG83xzy/pdisk_1.dat 2025-10-24T16:28:36.547980Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564826044145815085:2098];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:36.548015Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:36.550288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:36.561724Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on ... completed, doublechecking } 2025-10-24T16:28:37.085110Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564826048440782897:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:37.113043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.144901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.413483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:37.552229Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:39.696314Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3722: SelfId: [2:7564826057030725317:2954], SessionActorId: [2:7564826048440790651:2954], 
statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[2:7564826048440790651:2954]. 2025-10-24T16:28:39.696378Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2187: SessionId: ydb://session/3?node_id=2&id=Yjg2N2UwMDktNTM1Mzg2ZWItZWE5YzVjNDEtNWM3ZDViMzA=, ActorId: [2:7564826048440790651:2954], ActorState: ExecuteState, TraceId: 01k8bgt0b54mcgt3mj1042n0gz, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7564826057030725318:2954] from: [2:7564826057030725317:2954] 2025-10-24T16:28:39.696398Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [2:7564826057030725318:2954] TxId: 281474976710666. Ctx: { TraceId: 01k8bgt0b54mcgt3mj1042n0gz, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Yjg2N2UwMDktNTM1Mzg2ZWItZWE5YzVjNDEtNWM3ZDViMzA=, PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } 2025-10-24T16:28:39.696478Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=2&id=Yjg2N2UwMDktNTM1Mzg2ZWItZWE5YzVjNDEtNWM3ZDViMzA=, ActorId: [2:7564826048440790651:2954], ActorState: ExecuteState, TraceId: 01k8bgt0b54mcgt3mj1042n0gz, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16721, MsgBus: 24914 2025-10-24T16:28:40.102047Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7564826060089473069:2144];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:40.102205Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:40.105046Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d28/r3tmp/tmpYR0mYO/pdisk_1.dat 2025-10-24T16:28:40.123154Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:40.127877Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:40.127904Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:40.132381Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16721, node 3 2025-10-24T16:28:40.137797Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:40.137808Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:40.137809Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:40.137863Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24914 TClient is connected to server localhost:24914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:40.195238Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:40.197176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:40.387017Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:40.554560Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564826060089473601:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:40.554581Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564826060089473615:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:40.554607Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:40.554694Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7564826060089473639:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:40.554709Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:40.555455Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:40.558203Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7564826060089473638:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:40.627758Z node 3 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [3:7564826060089473691:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:40.642592Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:40.667285Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:40.794651Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:41.105440Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:41.133925Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=3&id=Y2M5ZDM2NmQtZmZlOGZkNDQtZDllNWUyZmMtYzYxNDQyYw==, ActorId: [3:7564826060089481376:2953], ActorState: ExecuteState, TraceId: 01k8bgt1s4djdcz3adg01hsdj3, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2025-10-24T16:28:38.311844Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.311944Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.312002Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.312055Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.312097Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.312140Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.314432Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.315237Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.321020Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321150Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321204Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321254Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321292Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321330Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321415Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321498Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321517Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321531Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321549Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321561Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321575Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321595Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.321655Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321681Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321807Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321864Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321883Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.321900Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.322035Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.322051Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.322072Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.322095Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.322111Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.322153Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.322179Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.322201Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.322234Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.322270Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:38.322381Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.323299Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.323403Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.324183Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.324219Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.324252Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.324280Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328244Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328576Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328616Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328649Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328748Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328773Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.328936Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.329021Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.329064Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.329128Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.329338Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.329959Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.329976Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.330230Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.330292Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-10-24T16:28:38.346315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:38.346336Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... waiting for nameservers are connected 2025-10-24T16:28:38.350516Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:38.350910Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:38.350980Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-10-24T16:28:38.351152Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:38.351736Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-10-24T16:28:38.351838Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-10-24T16:28:38.351922Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 
2025-10-24T16:28:38.351936Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:38.351940Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-10-24T16:28:38.351955Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-10-24T16:28:38.351984Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-10-24T16:28:38.351989Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-10-24T16:28:38.351993Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-10-24T16:28:38.351997Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:38.352012Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-10-24T16:28:38.352017Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-10-24T16:28:38.397028Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-10-24T16:28:38.397084Z node 1 :NODE_BROKER TRACE: node_brok ... 8:40.318820Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:812:2326], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.318837Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:815:2329] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318851Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:816:2330] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318863Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:813:2327] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318876Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:811:2325] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318888Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:817:2331] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318900Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:813:2327], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.318908Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:812:2326] Leader: 1 Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318919Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:814:2328] Leader: 1 
Dead: 0 Generation: 3 VersionInfo:  } 2025-10-24T16:28:40.318932Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:814:2328], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.318974Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:815:2329], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.318995Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:816:2330], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.319021Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:817:2331], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.319096Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 5 } 2025-10-24T16:28:40.319104Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319116Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319170Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [7:199:2072], Recipient [1:811:2325] 2025-10-24T16:28:40.319175Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319183Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319195Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [2:54:2072], Recipient [1:812:2326] 2025-10-24T16:28:40.319199Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319206Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319228Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [6:170:2072], Recipient [1:813:2327] 2025-10-24T16:28:40.319232Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319238Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319258Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [3:83:2072], Recipient [1:814:2328] 2025-10-24T16:28:40.319262Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319269Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319289Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [4:112:2072], Recipient [1:815:2329] 
2025-10-24T16:28:40.319293Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319300Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319319Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [5:141:2072], Recipient [1:816:2330] 2025-10-24T16:28:40.319323Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319329Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319347Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [8:228:2072], Recipient [1:817:2331] 2025-10-24T16:28:40.319352Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319360Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319488Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:822:2336], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.319511Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:28:40.319515Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319522Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319588Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:824:2338], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.319607Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:28:40.319611Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319618Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319679Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:826:2340], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.319695Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:28:40.319698Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319704Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 
1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.319759Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:828:2342], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.319779Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:627:2216], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 9 } 2025-10-24T16:28:40.319783Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:28:40.319789Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #7.9 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z - 1970-01-01T08:00:00.025000Z 2025-10-24T16:28:40.320079Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:830:2344], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.320117Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039950, Sender [1:627:2216], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvSubscribeNodesRequest { CachedVersion: 9 SeqNo: 2 } 2025-10-24T16:28:40.320124Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:257: StateWork, processing event TEvNodeBroker::TEvSubscribeNodesRequest 2025-10-24T16:28:40.320133Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:747: New subscriber [1:627:2216], seqNo: 2, version: 9, server pipe id: [1:830:2344] 2025-10-24T16:28:40.320147Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v9 -> v9 to [1:627:2216] 2025-10-24T16:28:40.320217Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877764, Sender [1:830:2344], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:28:40.320224Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:768: Unsubscribed [1:627:2216], seqNo: 2, server pipe id: [1:830:2344] 2025-10-24T16:28:40.320261Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:832:2346], Recipient [1:771:2299]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:40.320278Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039937, Sender [1:627:2216], Recipient [1:771:2299]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-10-24T16:28:40.320284Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:250: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-10-24T16:28:40.320304Z node 1 :NODE_BROKER TRACE: node_broker.cpp:1485: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: 2025-10-24T16:27:54.523954Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825860328986597:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.524134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020c0/r3tmp/tmpBAPh4o/pdisk_1.dat 2025-10-24T16:27:54.563282Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.578482Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20579, node 1 2025-10-24T16:27:54.588579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.588593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.588595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.588635Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:54.592964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) TClient is connected to server localhost:32330 2025-10-24T16:27:54.626042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.626079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.627367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.631020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:54.639003Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564825861926088345:2157];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.639042Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:27:54.639551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.639577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.640273Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:54.640695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.641022Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:54.641542Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:54.650941Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.650968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.651031Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651068Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651082Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651097Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651110Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651126Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651139Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.651170Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:54.652051Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.663203Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:54.663223Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:54.666770Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.669611Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:54.669626Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:54.669647Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:54.669650Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:54.669653Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:54.669658Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:54.669661Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:54.669666Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:54.669973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:7564825861926088661:2201] 2025-10-24T16:27:54.669984Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config 
changes 2025-10-24T16:27:54.670001Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:54.670517Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:54.670526Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:54.670535Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:54.672178Z node 2 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [2:7564825861926088850:2325] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-10-24T16:27:54.673045Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-10-24T16:27:54.673072Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:54.673227Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:54.673241Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:7564825861926088905:2309], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:54.673716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:7564825861926088939:2365] 2025-10-24T16:27:54.673733Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7564825861926088939:2365], schemeshard id = 72075186224037897 2025-10-24T16:27:54.723707Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:54.724761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:54.725887Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:54.725900Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-10-24T16:27:54.741976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.746979Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 
2025-10-24T16:27:54.845987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825860328987718:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.846001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.846035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825860328987727:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... e CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:34.791910Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:34.794375Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:28:34.805006Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:34.828438Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:34.859939Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:28:34.892195Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:34.927642Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:35.235805Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564826035713617038:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.235841Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.236295Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564826035713617048:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.236309Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.248995Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.262185Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.275842Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.288618Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.303011Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.319710Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.340109Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.361214Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:35.400661Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[7:7564826035713617913:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.400686Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.400778Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564826035713617919:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.400783Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7564826035713617918:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.400786Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:35.401784Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:35.408338Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7564826035713617922:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:35.492222Z node 7 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [7:7564826035713617974:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:35.665220Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::BridgeNoBscResponse >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> 
TxUsage::WriteToTopic_Demo_22_RestartNo_Query >> GenericFederatedQuery::PostgreSQLFilterPushdown >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks [GOOD] >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> DataShardWrite::IncrementImmediate >> DataShardWrite::UpsertWithDefaults >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TxUsage::WriteToTopic_Demo_24_Table [GOOD] >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController >> THealthCheckTest::TestTabletsInUnresolvaleDatabase [GOOD] >> THealthCheckTest::UnknowPDiskState >> TxUsage::WriteToTopic_Demo_24_Query ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-10-24T16:28:02.382167Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:02.382215Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:02.387837Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:02.387931Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:02.410490Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:02.410757Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=11680712936437874348, session=0, seqNo=0) 2025-10-24T16:28:02.410821Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:02.427545Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=11680712936437874348, session=1) 2025-10-24T16:28:02.427673Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=13879976482565208767, session=0, seqNo=0) 2025-10-24T16:28:02.427713Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:02.438608Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=13879976482565208767, session=2) 2025-10-24T16:28:02.438787Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:149:2171], cookie=13260079634091445027, name="Sem1", limit=1) 2025-10-24T16:28:02.438830Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:02.449781Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:149:2171], cookie=13260079634091445027) 2025-10-24T16:28:02.449877Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=111, session=1, semaphore="Sem1" 
count=1) 2025-10-24T16:28:02.449936Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-10-24T16:28:02.449995Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-10-24T16:28:02.460946Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=111) 2025-10-24T16:28:02.460983Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=222) 2025-10-24T16:28:02.461129Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:157:2179], cookie=1704902951451457354, name="Sem1") 2025-10-24T16:28:02.461163Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:157:2179], cookie=1704902951451457354) 2025-10-24T16:28:02.461237Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:160:2182], cookie=4630185559580667139, name="Sem1") 2025-10-24T16:28:02.461244Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:160:2182], cookie=4630185559580667139) 2025-10-24T16:28:02.892999Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:02.905650Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:03.268434Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:03.283810Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:03.641824Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:03.652647Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:03.991877Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:04.002805Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:04.349782Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:04.365562Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:04.710475Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:04.721248Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:05.047137Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:05.058109Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:05.393395Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:05.405131Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:05.758974Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2025-10-24T16:28:05.769823Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:06.173005Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:06.183914Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:06.534479Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:06.545430Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:06.900625Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:06.911529Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:07.262616Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:07.273489Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:07.619331Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:07.630246Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.037074Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.048213Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.394179Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.409546Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.773039Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.783844Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:09.136058Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:09.147094Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:09.502944Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:09.513878Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:09.919148Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:09.930498Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:10.304336Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:10.317401Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:10.700087Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:10.717309Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:11.105478Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:11.117562Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:11.497887Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:11.515489Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:11.932102Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:11.948372Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:12.370398Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:12.389683Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:12.779467Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:12.794511Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:13.180941Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:13.196322Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:13.582416Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:13.595653Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:14.063151Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:14.080236Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:14.487319Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:14.499308Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:14.874814Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:14.890651Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:15.284894Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:15.298620Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:15.680549Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:15.700297Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... 
BUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:38.522024Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:38.889575Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:38.905103Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:39.269822Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:39.285594Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:39.718403Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:39.731753Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:40.109810Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:40.124777Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:40.501403Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:40.512888Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:40.858551Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:40.870191Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:41.209365Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:41.223048Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:41.592777Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:41.603718Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:41.953200Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:41.964138Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:42.346740Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:42.360394Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:42.744139Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:42.756317Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:43.131058Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:43.141941Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:43.518218Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-10-24T16:28:43.518249Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-10-24T16:28:43.518259Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-10-24T16:28:43.529664Z node 4 :KESUS_TABLET DEBUG: 
tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-10-24T16:28:43.540037Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:456:2415], cookie=17925396852113048101, name="Sem1") 2025-10-24T16:28:43.540080Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:456:2415], cookie=17925396852113048101) 2025-10-24T16:28:43.642210Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:43.642242Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:43.647258Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:43.647465Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:43.680154Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:43.680322Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=4480084863006789894, session=0, seqNo=0) 2025-10-24T16:28:43.680364Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:43.696390Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=4480084863006789894, session=1) 2025-10-24T16:28:43.696475Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=1577297739593159830, session=0, seqNo=0) 2025-10-24T16:28:43.696504Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:43.708353Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=1577297739593159830, session=2) 2025-10-24T16:28:43.708487Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=9759894804304274987, session=0, seqNo=0) 2025-10-24T16:28:43.708536Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-10-24T16:28:43.719673Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=9759894804304274987, session=3) 2025-10-24T16:28:43.719896Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2173], cookie=1715873284713058678, name="Sem1", limit=3) 2025-10-24T16:28:43.719947Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:43.732179Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2173], cookie=1715873284713058678) 2025-10-24T16:28:43.732322Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=111, session=1, semaphore="Sem1" count=2) 2025-10-24T16:28:43.732380Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-10-24T16:28:43.732438Z node 5 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-10-24T16:28:43.732451Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-10-24T16:28:43.732473Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=333, session=3, semaphore="Sem1" count=1) 2025-10-24T16:28:43.744308Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=111) 2025-10-24T16:28:43.744349Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=222) 2025-10-24T16:28:43.744357Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=333) 2025-10-24T16:28:43.744529Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=3863908861136286040, name="Sem1") 2025-10-24T16:28:43.744557Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=3863908861136286040) 2025-10-24T16:28:43.744623Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=9793401557093430943, name="Sem1") 2025-10-24T16:28:43.744630Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=9793401557093430943) 2025-10-24T16:28:43.744664Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=444, session=1, semaphore="Sem1" count=1) 2025-10-24T16:28:43.744704Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-10-24T16:28:43.757189Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=444) 2025-10-24T16:28:43.757391Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=13269847565678857143, name="Sem1") 2025-10-24T16:28:43.757422Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=13269847565678857143) 2025-10-24T16:28:43.757501Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=16644949095827685886, name="Sem1") 2025-10-24T16:28:43.757512Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=16644949095827685886) 2025-10-24T16:28:43.765578Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:43.765624Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:43.765688Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:43.766050Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] 
TTxInit::Execute 2025-10-24T16:28:43.810838Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:43.810916Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-10-24T16:28:43.810927Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-10-24T16:28:43.810933Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-10-24T16:28:43.811069Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:209:2222], cookie=10738518473917837792, name="Sem1") 2025-10-24T16:28:43.811096Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:209:2222], cookie=10738518473917837792) 2025-10-24T16:28:43.811229Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:218:2230], cookie=2475461560307990150, name="Sem1") 2025-10-24T16:28:43.811237Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:218:2230], cookie=2475461560307990150) >> TxUsage::WriteToTopic_Demo_43_Table [GOOD] >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3258, MsgBus: 8125 2025-10-24T16:27:58.991832Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825877830305677:2069];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:58.992195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002087/r3tmp/tmphWzNqX/pdisk_1.dat 2025-10-24T16:27:59.039814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:59.039876Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:59.040082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825877830305643:2081] 1761323278991323 != 1761323278991326 TServer::EnableGrpc on GrpcPort 3258, node 1 2025-10-24T16:27:59.049479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:59.049493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.049495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.049539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8125 TClient is connected to server localhost:8125 2025-10-24T16:27:59.095727Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.095763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.097853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.114509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.117495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-10-24T16:27:59.121291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.146985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.171279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:59.186792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.250103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:59.417907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882125274590:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.417934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.418036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882125274600:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.418042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.480519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.495563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.508272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.523496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.538254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.555911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.569147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.583817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:59.599695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825882125275461:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.599719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.599835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882125275466:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.599862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825882125275467:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:59.599868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Datab ... t called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:38.691417Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:38.791268Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:38.945380Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:39.169185Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:39.466356Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1704:3309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:39.466413Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:39.466605Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1777:3328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:39.466639Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:39.476135Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:39.656507Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:39.884214Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:40.084801Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:40.309588Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:40.512667Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:40.789886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:41.008783Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:41.255865Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2588:3969], 
DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:41.255910Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:41.255989Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2592:3973], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:41.256005Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2595:3976], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:41.256014Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:41.257321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:41.394597Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2597:3978], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:41.458130Z node 4 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [4:2658:4020] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:41.780100Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:41.946843Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:42.218021Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:43.569690Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1547: SelfId: [4:3370:4561], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgt34hek622p66ykq3emh4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGYzZTY0ZjctYzIxYmRmYTAtNzM2ZDJmNWEtMzNmZDRkNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-10-24T16:28:43.569730Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [4:3370:4561], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgt34hek622p66ykq3emh4. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGYzZTY0ZjctYzIxYmRmYTAtNzM2ZDJmNWEtMzNmZDRkNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-10-24T16:28:43.570017Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [4:3371:4562], TxId: 281474976715676, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgt34hek622p66ykq3emh4. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OGYzZTY0ZjctYzIxYmRmYTAtNzM2ZDJmNWEtMzNmZDRkNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [4:3364:4227], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:43.570169Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=4&id=OGYzZTY0ZjctYzIxYmRmYTAtNzM2ZDJmNWEtMzNmZDRkNzQ=, ActorId: [4:2927:4227], ActorState: ExecuteState, TraceId: 01k8bgt34hek622p66ykq3emh4, Create QueryResponse for error on request, msg: >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::StorageNoQuota >> TxUsage::WriteToTopic_Demo_43_Query >> DataShardWrite::IncrementImmediate [GOOD] >> DataShardWrite::UpsertImmediate >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::AsyncIndexKeySizeConstraint >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> DataShardWrite::UpsertImmediateManyColumns >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table [GOOD] >> THealthCheckTest::UnknowPDiskState [GOOD] >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::ReplaceImmediate >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> THealthCheckTest::TestStateStorageOk [GOOD] >> THealthCheckTest::TestStateStorageBlue >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] Test command err: 2025-10-24T16:28:36.546916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:36.546975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:36.603957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to 
discover tenant nodes 2025-10-24T16:28:36.604427Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:36.606872Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:685:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:36.606910Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.606920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:36.607278Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:681:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:36.607439Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.607484Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00189f/r3tmp/tmpPkzWUQ/pdisk_1.dat 2025-10-24T16:28:36.766839Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:36.816613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.816652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.816763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.816785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.874792Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:28:36.875037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:36.875118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24235, node 1 TClient is connected to server localhost:14539 2025-10-24T16:28:37.003530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:37.003553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:37.003604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:37.003685Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:38.428419Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.428500Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.431601Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.431920Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:693:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.432008Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:38.432034Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.432433Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:689:2280], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.432492Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.432511Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00189f/r3tmp/tmpiB5a34/pdisk_1.dat 2025-10-24T16:28:38.565697Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:38.618970Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.619017Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.619151Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.619165Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.690777Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:28:38.690914Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:38.691044Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11573, node 3 TClient is connected to server localhost:61705 2025-10-24T16:28:39.273512Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:39.274402Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-10-24T16:28:39.274916Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:28:39.281551Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:39.281583Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:39.281588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:39.282291Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:39.283126Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2025-10-24T16:28:39.283212Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:28:39.292086Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:39.292123Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-10-24T16:28:39.340543Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-10-24T16:28:39.340760Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:39.425726Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:39.445606Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2025-10-24T16:28:39.446012Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-3" reason: "YELLOW-7932-1231c6b1-4" reason: "YELLOW-7932-1231c6b1-5" type: "COMPUTE" level: 2 } issue_log { id: "RED-b954-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } issue_log { id: "RED-3c4a-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-b954-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "RED-5995-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-3c4a-1231c6b1" reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 3 host: "::1" port: 12001 } 2025-10-24T16:28:40.598084Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.59993 ... } pool { name: "/Root:test" group { vdisk { id: "2181038080-1-0-0-0" } } } } } type: "VDISK" level: 5 } location { id: 6 host: "::1" port: 12001 } 2025-10-24T16:28:42.525540Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:42.525793Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:42.526037Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:757:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-10-24T16:28:42.533454Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:42.533831Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:42.535512Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:761:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:42.535647Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:42.535657Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:42.536042Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:42.536070Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00189f/r3tmp/tmpziDXTW/pdisk_1.dat 2025-10-24T16:28:42.704017Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:42.767813Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:42.767868Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:42.768092Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:42.768108Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:42.820828Z node 7 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-10-24T16:28:42.821169Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:42.821267Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11365, node 7 TClient is connected to server localhost:9797 2025-10-24T16:28:43.442380Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:43.443489Z node 7 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-10-24T16:28:43.444026Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:28:43.444267Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:43.444277Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:43.444282Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:43.445165Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:43.446045Z node 7 :HIVE WARN: tx__status.cpp:58: 
HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-10-24T16:28:43.446119Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:28:43.449725Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:43.449758Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:43.478342Z node 7 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-10-24T16:28:43.478479Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:43.545286Z node 7 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-10-24T16:28:43.545545Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-7" reason: "YELLOW-7932-1231c6b1-8" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 7 host: "::1" port: 12001 } 2025-10-24T16:28:44.781259Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:44.782590Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:44.783030Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:44.783081Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:44.783107Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00189f/r3tmp/tmpBkFkpX/pdisk_1.dat 2025-10-24T16:28:44.904630Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:44.940920Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.940952Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.964088Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23438, node 10 TClient is connected to server localhost:17816 2025-10-24T16:28:45.047946Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:45.047971Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:45.047977Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:45.048139Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:46.094068Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:46.096313Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:46.096940Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:46.097009Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:46.097057Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00189f/r3tmp/tmpQtuE08/pdisk_1.dat 2025-10-24T16:28:46.251024Z node 12 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:46.307493Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:46.307538Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:46.336715Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4643, node 12 TClient is connected to server localhost:27269 2025-10-24T16:28:46.429555Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:46.429575Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:46.429580Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:46.429639Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query [GOOD] >> THealthCheckTest::NoStoragePools [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-10-24T16:27:19.846064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:19.870706Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:27:19.870779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:27:19.870805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002ad8/r3tmp/tmpUIx0pd/pdisk_1.dat 2025-10-24T16:27:19.986646Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:20.020582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:20.020630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:20.054177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10634, node 1 2025-10-24T16:27:20.147063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:20.147087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:20.147090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:20.147237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:27:20.150244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:27:20.198031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11948 2025-10-24T16:27:20.546980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-10-24T16:27:21.359303Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:27:21.361684Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-10-24T16:27:21.370847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.370882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.414344Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:27:21.414933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.488337Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:21.488379Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:21.488726Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.488877Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489012Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489193Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489285Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489323Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.489381Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-10-24T16:27:21.501174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:21.556561Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:21.564880Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-10-24T16:27:21.564910Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-10-24T16:27:21.571055Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-10-24T16:27:21.571238Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-10-24T16:27:21.571262Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-10-24T16:27:21.571268Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-10-24T16:27:21.571275Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-10-24T16:27:21.571282Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-10-24T16:27:21.571289Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-10-24T16:27:21.571296Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-10-24T16:27:21.571470Z node 2 
:STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-10-24T16:27:21.571768Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1611:2452] 2025-10-24T16:27:21.573520Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-10-24T16:27:21.575399Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-10-24T16:27:21.575419Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-10-24T16:27:21.575430Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-10-24T16:27:21.578598Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8309: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.578624Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8339: ConnectToSA(), pipe client id: [2:1846:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-10-24T16:27:21.580403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1864:2607] 2025-10-24T16:27:21.580671Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1864:2607], schemeshard id = 72075186224037897 2025-10-24T16:27:21.586103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:21.588043Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-10-24T16:27:21.588081Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-10-24T16:27:21.659493Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:21.710124Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-10-24T16:27:21.754046Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-10-24T16:27:21.855483Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-10-24T16:27:21.980934Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. 
Column diff is empty, finishing 2025-10-24T16:27:22.660092Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:27:22.712266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2220:3058], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.712332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.712477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3063], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:22.712490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, ... bution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:28:43.019016Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:43.019090Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-10-24T16:28:43.019255Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:6087:4752], server id = [2:6088:4753], tablet id = 72075186224037899, status = OK 2025-10-24T16:28:43.019284Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:6087:4752], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:28:43.019599Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:28:43.019610Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:43.019639Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:43.019663Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:43.019735Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:6090:4755], ActorId: [2:6091:4756], Starting query actor #1 [2:6092:4757] 2025-10-24T16:28:43.019748Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:6091:4756], ActorId: [2:6092:4757], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:28:43.020374Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:6087:4752], server id = [2:6088:4753], tablet id = 72075186224037899 2025-10-24T16:28:43.020385Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:43.020556Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:6091:4756], ActorId: [2:6092:4757], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjBiNGVmOGMtZDJlMWIyYTEtOTVjNTYxNzgtYTYxYzQxMDM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:43.035014Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:6091:4756], ActorId: [2:6092:4757], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjBiNGVmOGMtZDJlMWIyYTEtOTVjNTYxNzgtYTYxYzQxMDM=, TxId: 2025-10-24T16:28:43.035046Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:6091:4756], ActorId: [2:6092:4757], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjBiNGVmOGMtZDJlMWIyYTEtOTVjNTYxNzgtYTYxYzQxMDM=, TxId: 2025-10-24T16:28:43.035158Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:6090:4755], ActorId: [2:6091:4756], Got response [2:6092:4757] SUCCESS 2025-10-24T16:28:43.035257Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:43.070829Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:28:43.070864Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:3100:3330] 2025-10-24T16:28:43.589638Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 11 is different from the current 0 2025-10-24T16:28:43.589687Z node 2 :STATISTICS DEBUG: service_impl.cpp:379: Skip TEvDispatchKeepAlive 2025-10-24T16:28:44.067473Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-10-24T16:28:44.067572Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 11 2025-10-24T16:28:44.067599Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2025-10-24T16:28:44.079728Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8441: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-10-24T16:28:44.079765Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-10-24T16:28:44.079865Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 53, entries count: 2, are all stats full: 1 2025-10-24T16:28:44.094422Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-10-24T16:28:44.180228Z node 2 :STATISTICS DEBUG: service_impl.cpp:252: Event round 10 is different from the current 0 2025-10-24T16:28:44.180267Z node 2 :STATISTICS DEBUG: service_impl.cpp:1023: Skip TEvStatisticsRequestTimeout 2025-10-24T16:28:44.180310Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:28:44.180316Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-10-24T16:28:44.180321Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:28:45.176119Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:28:46.192148Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:649: [72075186224037894] ScheduleNextAnalyze 2025-10-24T16:28:46.192182Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:677: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-10-24T16:28:46.192188Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:681: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-10-24T16:28:47.128135Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-10-24T16:28:47.201967Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-10-24T16:28:47.202088Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-10-24T16:28:47.202097Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:761: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:28:47.202397Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-10-24T16:28:47.236220Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-10-24T16:28:47.236438Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:102: [72075186224037894] TTxResolve::Execute 2025-10-24T16:28:47.236459Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:133: [72075186224037894] TTxResolve::Complete 2025-10-24T16:28:47.236620Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:58: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-10-24T16:28:47.261186Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:92: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-10-24T16:28:47.261278Z node 2 :STATISTICS DEBUG: service_impl.cpp:588: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-10-24T16:28:47.261516Z node 2 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 2, client id = [2:6248:4831], server id = [2:6249:4832], tablet id = 72075186224037899, status = OK 2025-10-24T16:28:47.261573Z node 2 :STATISTICS DEBUG: service_impl.cpp:1057: TEvStatisticsRequest send, client id = [2:6248:4831], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-10-24T16:28:47.262191Z node 2 :STATISTICS DEBUG: service_impl.cpp:317: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-10-24T16:28:47.262212Z node 2 :STATISTICS DEBUG: service_impl.cpp:502: Send aggregate statistics response to node: 2 2025-10-24T16:28:47.262250Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-10-24T16:28:47.262288Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:118: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-10-24T16:28:47.262391Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:6251:4834], ActorId: [2:6252:4835], Starting query actor #1 [2:6253:4836] 2025-10-24T16:28:47.262412Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:6252:4835], ActorId: [2:6253:4836], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-10-24T16:28:47.263519Z node 2 :STATISTICS DEBUG: service_impl.cpp:1123: EvClientDestroyed, node id = 2, client id = [2:6248:4831], server id = [2:6249:4832], tablet id = 72075186224037899 2025-10-24T16:28:47.263534Z node 2 :STATISTICS DEBUG: service_impl.cpp:1141: Skip EvClientDestroyed 2025-10-24T16:28:47.263765Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:6252:4835], ActorId: [2:6253:4836], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZTZhZjY5OTMtMjNmMjBjYjktZmY3ODI3M2MtNTBkYTYxN2I=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-10-24T16:28:47.289261Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:6252:4835], ActorId: [2:6253:4836], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTZhZjY5OTMtMjNmMjBjYjktZmY3ODI3M2MtNTBkYTYxN2I=, TxId: 2025-10-24T16:28:47.289293Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:6252:4835], ActorId: [2:6253:4836], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTZhZjY5OTMtMjNmMjBjYjktZmY3ODI3M2MtNTBkYTYxN2I=, TxId: 2025-10-24T16:28:47.289380Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:204: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-10-24T16:28:47.289416Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:6251:4834], ActorId: [2:6252:4835], Got response [2:6253:4836] SUCCESS 2025-10-24T16:28:47.289493Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-10-24T16:28:47.304591Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-10-24T16:28:47.304626Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:50: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:3100:3330] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] Test command err: 2025-10-24T16:28:37.234867Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:37.235120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:37.298041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:37.300747Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:305:2228], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:37.300894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:37.300906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:37.301335Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:689:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:37.301383Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:37.301411Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001899/r3tmp/tmp6AiccN/pdisk_1.dat 2025-10-24T16:28:37.490917Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:37.552735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:37.552773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:37.552950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:37.552963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:37.592700Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:28:37.592889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:37.592996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8483, node 1 TClient is connected to server localhost:15633 2025-10-24T16:28:37.705076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:37.705097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:37.705102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:37.705151Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:39.231374Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:39.231425Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:39.234157Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:39.234422Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:39.234735Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:503:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:39.234809Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:39.234844Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:39.235347Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:497:2169], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:39.235420Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:39.235446Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001899/r3tmp/tmpOCV8ck/pdisk_1.dat 2025-10-24T16:28:39.446229Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:39.485086Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:39.485134Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:39.485330Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:39.485344Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:39.517481Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:28:39.517665Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:39.517758Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19486, node 3 TClient is connected to server localhost:24377 2025-10-24T16:28:39.614841Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:39.614864Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:39.614869Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:39.614993Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:41.013550Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:41.013600Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:41.016160Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:41.016564Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:41.016863Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:41.016927Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:41.016982Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:41.017253Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:680:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:41.017391Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:41.017413Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001899/r3tmp/tmpAxBi2o/pdisk_1.dat 2025-10-24T16:28:41.168901Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:41.216460Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:41.216501Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:41.216618Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:41.216630Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:41.264801Z node 5 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-10-24T16:28:41.264955Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:41.265084Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16158, node 5 TClient is connected to server localhost:30389 2025-10-24T16:28:41.375692Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:41.375719Z node 5 :NET_CLASSIFIER WAR ... not detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:44.719671Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:497:2169], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:44.719800Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:44.719844Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001899/r3tmp/tmpQPj4ZP/pdisk_1.dat 2025-10-24T16:28:44.811053Z node 9 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:44.854057Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.854109Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.854302Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.854315Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.888800Z node 9 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-10-24T16:28:44.889068Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:44.889179Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29905, node 9 TClient is connected to server localhost:31792 2025-10-24T16:28:44.980306Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:44.980331Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:44.980336Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:44.980516Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } 2025-10-24T16:28:45.821836Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:45.823569Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:45.824014Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:451:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:45.824090Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:45.824106Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001899/r3tmp/tmpv0lDlj/pdisk_1.dat 2025-10-24T16:28:45.938621Z node 11 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:45.974267Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:45.974313Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:45.997514Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3641, node 11 TClient is connected to server localhost:18156 2025-10-24T16:28:46.121542Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:46.121566Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:46.121571Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:46.121662Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-11" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } database_status { name: "/Root" overall: YELLOW storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN } } } compute { overall: YELLOW nodes { id: "11" overall: YELLOW load { overall: YELLOW load: 141.82959 cores: 64 } } clock_skew { overall: GREEN } } } database_status { name: "/Root/shared" overall: GREEN storage { overall: GREEN pools { id: "/Root:test" overall: GREEN groups { id: "2147483648" overall: GREEN vdisks { id: "11-1-55" overall: GREEN pdisk { id: "11-1" overall: GREEN } } } } } compute { overall: GREEN nodes { id: "12" overall: GREY } clock_skew { overall: GREEN } } } location { id: 11 host: "::1" port: 12001 } 2025-10-24T16:28:47.008114Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:47.009977Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 
2025-10-24T16:28:47.010516Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:47.010579Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:47.010609Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001899/r3tmp/tmptwlr0n/pdisk_1.dat 2025-10-24T16:28:47.115140Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:47.152988Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:47.153043Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:47.194814Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25357, node 13 TClient is connected to server localhost:17099 2025-10-24T16:28:47.270328Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:47.270352Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:47.270356Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:47.270488Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-dd01" status: RED message: "There are no storage pools" type: "STORAGE" level: 2 } issue_log { id: "RED-5cc9" status: RED message: "Database has storage issues" reason: "RED-dd01" type: "DATABASE" level: 1 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: GREEN nodes { id: "14" overall: GREEN load { overall: GREEN cores: 64 } } clock_skew { overall: GREEN } } } location { id: 13 } self_check_result: EMERGENCY issue_log { id: "YELLOW-f5df-14" status: YELLOW message: "The number of node restarts has increased" location { compute { node { id: 14 host: "::1" port: 12001 } } } type: "NODE_UPTIME" level: 4 } issue_log { id: "YELLOW-c2be" status: YELLOW message: "Some nodes are restarting too often" reason: "YELLOW-f5df-14" type: "COMPUTE" level: 2 } issue_log { id: "RED-dd01" status: RED message: "There are no storage pools" type: "STORAGE" level: 2 } issue_log { id: "RED-edf5" status: RED message: "Database has multiple issues" reason: "RED-dd01" reason: "YELLOW-c2be" type: "DATABASE" level: 1 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: YELLOW nodes { id: "14" overall: GREEN load { overall: GREEN cores: 64 } } clock_skew { overall: GREEN } } } location { id: 13 } self_check_result: EMERGENCY issue_log { id: "ORANGE-534b-14" status: ORANGE message: "Node is restarting too often" location { compute { node { id: 14 host: "::1" port: 12001 } } } type: "NODE_UPTIME" level: 4 } issue_log { id: "ORANGE-c2be" status: ORANGE message: "Some nodes are restarting too often" reason: "ORANGE-534b-14" type: "COMPUTE" 
level: 2 } issue_log { id: "RED-dd01" status: RED message: "There are no storage pools" type: "STORAGE" level: 2 } issue_log { id: "RED-edf5" status: RED message: "Database has multiple issues" reason: "ORANGE-c2be" reason: "RED-dd01" type: "DATABASE" level: 1 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: ORANGE nodes { id: "14" overall: GREEN load { overall: GREEN cores: 64 } } clock_skew { overall: GREEN } } } location { id: 13 } >> DataShardWrite::InsertImmediate >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: 2025-10-24T16:28:37.547557Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:37.547798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:37.589801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:37.592403Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:305:2228], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:37.592534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:37.592545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:37.592979Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:689:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:37.593029Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:37.593054Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpDWflwc/pdisk_1.dat 2025-10-24T16:28:37.718524Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:37.768890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:37.768927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:37.769115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:37.769137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:37.804740Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:28:37.804912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:37.805010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14702, node 1 TClient is connected to server localhost:14955 2025-10-24T16:28:37.936460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:37.936483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:37.936487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:37.936616Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:39.218627Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:39.218672Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:39.221319Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:39.221594Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:39.222001Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:39.222060Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:39.222107Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:39.222345Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:39.222521Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:39.222532Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmp7PKlHF/pdisk_1.dat 2025-10-24T16:28:39.370071Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:39.416403Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:39.416437Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:39.416515Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:39.416526Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:39.456500Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:28:39.456748Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:39.456854Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29298, node 3 TClient is connected to server localhost:27322 2025-10-24T16:28:39.554877Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:39.554899Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:39.554904Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:39.555039Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:41.131560Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:41.131611Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:41.134189Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:41.134455Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:41.134841Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:685:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:41.134910Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:41.134936Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:41.135316Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:41.135360Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:41.135384Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpLr1xCM/pdisk_1.dat 2025-10-24T16:28:41.233656Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:41.271129Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:41.271177Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:41.271288Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:41.271304Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:41.303550Z node 5 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-10-24T16:28:41.303762Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:41.303933Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11076, node 5 TClient is connected to server localhost:14393 2025-10-24T16:28:41.394986Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:41.395007Z node 5 :NET_CLASSIFIER WA ... essor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:44.535953Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:44.536365Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:497:2169], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:44.536481Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:44.536521Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpGT1euH/pdisk_1.dat 2025-10-24T16:28:44.631523Z node 9 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:44.670302Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.670352Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.670520Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.670535Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.708446Z node 9 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-10-24T16:28:44.708694Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:44.708784Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13848, node 9 TClient is connected to server localhost:6168 2025-10-24T16:28:44.793617Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:44.793635Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:44.793641Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:44.793771Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-a838-9-9-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-42" path: "/home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpGT1euH/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: 
"::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-43" path: "/home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpGT1euH/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-44" path: "/home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpGT1euH/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } 2025-10-24T16:28:46.207291Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:46.207340Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:46.210436Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:46.210838Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:46.211194Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:46.211287Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:46.211317Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:46.211586Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:680:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:46.211662Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:46.211686Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpryLicR/pdisk_1.dat 2025-10-24T16:28:46.350551Z node 11 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:46.407737Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:46.407797Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:46.408009Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:46.408028Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:46.440168Z node 11 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-10-24T16:28:46.440534Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:46.440628Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62201, node 11 TClient is connected to server localhost:22497 2025-10-24T16:28:46.513409Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:46.513428Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:46.513433Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:46.513502Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:47.462752Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:47.464173Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:47.464582Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:445:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:47.464630Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:47.464648Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001868/r3tmp/tmpktN6sx/pdisk_1.dat 2025-10-24T16:28:47.552145Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:47.585309Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:47.585354Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:47.628140Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19418, node 13 TClient is connected to server localhost:6742 2025-10-24T16:28:47.773302Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:47.773322Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:47.773327Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:47.773448Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-dd01" status: RED message: "There are no storage pools" type: "STORAGE" level: 2 } issue_log { id: "RED-5cc9" status: RED message: "Database has storage issues" reason: "RED-dd01" type: "DATABASE" level: 1 } database_status { name: "/Root/database" overall: RED storage { overall: RED } compute { overall: GREEN nodes { id: "14" overall: GREEN load { overall: GREEN cores: 64 } } clock_skew { overall: GREEN } } } location { id: 13 host: "::1" port: 12001 } >> TRestoreTests::CancelUponProposeShouldSucceed[Raw] >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> TImportTests::ShouldSucceedOnIndexedTable1 >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> TRestoreTests::CancelUponProposeShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelUponProposeShouldSucceed[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] Test command err: 2025-10-24T16:28:35.982741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:35.982793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:36.016887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 
2025-10-24T16:28:36.017161Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-10-24T16:28:36.019619Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:685:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:36.019657Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.019667Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:36.020036Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:681:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:36.020279Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.020309Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018cd/r3tmp/tmpFtUrV3/pdisk_1.dat 2025-10-24T16:28:36.128911Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:36.176346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.176389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.176487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.176496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.232876Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:28:36.233116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:36.233223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7862, node 1 TClient is connected to server localhost:1677 2025-10-24T16:28:36.365755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:36.365783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:36.365788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:36.365874Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-7932-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-1" reason: "YELLOW-7932-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-a8c7-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-f700-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-f700-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-5d3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { 
id: "YELLOW-5d3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-9f89-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-edf5-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" reason: "YELLOW-a8c7-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } 2025-10-24T16:28:38.181274Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.181327Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.184126Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.184375Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.184718Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:503:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.184797Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:38.184833Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.185370Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:497:2169], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.185458Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.185490Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018cd/r3tmp/tmp54cmM2/pdisk_1.dat 2025-10-24T16:28:38.300146Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:38.341855Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.341892Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.342043Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.342056Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.377510Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:28:38.377705Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:38.377799Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9139, node 3 TClient is connected to server localhost:61982 2025-10-24T16:28:38.482376Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:38.482402Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:38.482407Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:38.482547Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:40.036649Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.036701Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.039322Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:40.039734Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:40.040047Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:40.040110Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:40.040167Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:40.040435Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:680:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16: ... ) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:43.952612Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:43.952626Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:43.999025Z node 9 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-10-24T16:28:43.999301Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:43.999389Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62259, node 9 TClient is connected to server localhost:10212 2025-10-24T16:28:44.090302Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:44.090319Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:44.090324Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:44.090497Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:45.543800Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:45.543873Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:45.546629Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:45.546993Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:45.547342Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:45.547436Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:45.547492Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:45.547734Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:680:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:45.547814Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:45.547839Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018cd/r3tmp/tmpfUakv3/pdisk_1.dat 2025-10-24T16:28:45.643561Z node 11 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:45.685052Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:45.685096Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:45.685309Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:45.685323Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:45.720598Z node 11 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-10-24T16:28:45.720839Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:45.720931Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7098, node 11 TClient is connected to server localhost:17976 2025-10-24T16:28:45.815263Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:45.815290Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:45.815295Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:45.815555Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:47.356769Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:47.357023Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:47.357814Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:762:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-10-24T16:28:47.360441Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:47.360682Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:47.361084Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:381:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:47.361187Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:47.361230Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:47.361603Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:47.361623Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018cd/r3tmp/tmpP55KJ0/pdisk_1.dat 2025-10-24T16:28:47.485887Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:47.536361Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:47.536403Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:47.536590Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:47.536605Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:47.558521Z node 13 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2025-10-24T16:28:47.558789Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:47.558921Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30008, node 13 TClient is connected to server localhost:3701 2025-10-24T16:28:48.015100Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:48.017251Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:48.017274Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:48.017280Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:48.017429Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:48.022675Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:48.022719Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:48.049031Z node 14 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:48.061159Z node 13 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-10-24T16:28:48.061328Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-14" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 14 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-15" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 15 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-13" reason: "YELLOW-7932-1231c6b1-14" reason: "YELLOW-7932-1231c6b1-15" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 13 host: "::1" port: 12001 } >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::UpdateImmediate >> TImportTests::ShouldSucceedOnIndexedTable1 [GOOD] >> TImportTests::ShouldSucceedOnIndexedTable2 >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> TRestoreTests::CancelUponProposeShouldSucceed[Zstd] [GOOD] >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Raw] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> THealthCheckTest::BridgeNoBscResponse [GOOD] >> THealthCheckTest::CLusterNotBootstrapped >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Raw] >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Raw] >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelUponProposeResultShouldSucceed[Raw] >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 17622, MsgBus: 14358 2025-10-24T16:28:00.960468Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825888361749959:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:00.960481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00205e/r3tmp/tmpVJiCLX/pdisk_1.dat 
2025-10-24T16:28:01.017262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:01.023231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825888361749931:2081] 1761323280960132 != 1761323280960135 2025-10-24T16:28:01.027513Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17622, node 1 2025-10-24T16:28:01.035421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:01.035432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:01.035433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:01.035469Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14358 2025-10-24T16:28:01.065476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:01.065507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:01.066614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:01.112223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:01.115465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-10-24T16:28:01.120625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:01.263338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:01.430491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892656718319:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:01.430496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892656718311:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:01.430511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:01.430567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825892656718326:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:01.430581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:01.431235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:01.432950Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825892656718325:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:28:01.528406Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825892656718378:2610] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:01.601695Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=1;memory=1048576; 2025-10-24T16:28:01.601716Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 1. [Mem] memory 1048576 NOT granted 2025-10-24T16:28:01.603835Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [1:7564825892656718410:2349], TxId: 281474976715661, task: 1. Ctx: { TraceId : 01k8bgrv0nedjr8416emd2txqv. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZWMxYWU2M2ItMzJjYTBkZjktY2QwODY4MmEtZjY1ODRhYTA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-aakgzge3ui, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2025-10-24T16:28:01.588374Z }, code: 2029 }. 2025-10-24T16:28:01.606176Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037894 Cancelled read: {[1:7564825892656718412:2349], 6} 2025-10-24T16:28:01.606195Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037889 Cancelled read: {[1:7564825892656718412:2349], 1} 2025-10-24T16:28:01.606204Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037890 Cancelled read: {[1:7564825892656718412:2349], 2} 2025-10-24T16:28:01.606212Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037888 Cancelled read: {[1:7564825892656718412:2349], 0} 2025-10-24T16:28:01.606220Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037891 Cancelled read: {[1:7564825892656718412:2349], 3} 2025-10-24T16:28:01.606229Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037895 Cancelled read: {[1:7564825892656718412:2349], 7} 2025-10-24T16:28:01.606237Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037893 Cancelled read: {[1:7564825892656718412:2349], 5} 2025-10-24T16:28:01.606248Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3439: 72075186224037892 Cancelled read: {[1:7564825892656718412:2349], 4} 2025-10-24T16:28:01.607245Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=ZWMxYWU2M2ItMzJjYTBkZjktY2QwODY4MmEtZjY1ODRhYTA=, ActorId: [1:7564825892656718309:2349], ActorState: ExecuteState, TraceId: 01k8bgrv0nedjr8416emd2txqv, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-aakgzge3ui, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2025-10-24T16:28:01.588374Z } , code: 2029 query_phases { duration_us: 19707 table_access { name: "/Root/LargeTable" partitions_count: 8 } cpu_time_us: 4107 affected_shards: 8 } compilation { duration_us: 58076 cpu_time_us: 57170 } process_cpu_time_us: 185 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048586,\"Max\":1048586,\"Min\":1048586,\"History\":[3,1048586]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"FinishedTasks\ ... from file: (empty maybe) 2025-10-24T16:28:47.448910Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8987 TClient is connected to server localhost:8987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:47.508435Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:28:47.517581Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:47.528653Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:47.553075Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:47.571231Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:47.642515Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:47.823087Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826091024996482:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.823111Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.823154Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826091024996491:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.823161Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:47.836101Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.845043Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.856732Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.871257Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.885226Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.899199Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.913376Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.927810Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:47.944223Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564826091024997355:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.944249Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.944282Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826091024997362:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.944293Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-10-24T16:28:47.944296Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826091024997360:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:47.945376Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:47.954021Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564826091024997364:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:48.021955Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564826095319964712:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:48.262862Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:48.423360Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:49.194586Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=ZmQyNzBkNy0xZjgxYmRhMy03ZWVmOTA0LWE0NmFjMGEz, ActorId: [5:7564826095319965008:2521], ActorState: ExecuteState, TraceId: 01k8bgt9k04kxec7bav7f01hkx, Create QueryResponse for error on request, msg: >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnSmallBuffer >> TImportTests::ShouldSucceedOnIndexedTable2 [GOOD] >> TImportTests::ShouldSucceedOnIndexedTable3 >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:48.878981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:48.879011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:48.879017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:48.879025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:48.879032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:48.879037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:48.879048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:48.879072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:48.879227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:48.879298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:48.896736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:48.896765Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:48.900213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:48.900257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:48.900291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:48.902075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:48.902295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:48.902425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:48.902521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:48.903810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:48.903873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:48.904127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:48.904138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:48.904175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:48.904183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:48.904189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:48.904217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-10-24T16:28:48.905720Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:48.930767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:48.930857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.930928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:48.930936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:48.931005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:48.931021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:48.933710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:48.933768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:48.933845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.933858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:48.933865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:48.933871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:48.934468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:48.934881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:48.934907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:48.935713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:48.936209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:48.936250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:48.936469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:48.936498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:48.936506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:48.936582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:48.936590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:48.936638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:48.936651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:48.937106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:48.937115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:28:49.915934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:28:49.926984Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:28:49.937945Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:49.937985Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:49.938002Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv, range# 0-13 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:2343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CF6CEF67-950A-45C5-9BB1-1598FA9F0E92 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-10-24T16:28:49.938792Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a3ed28bfb53c9214f635c51ed6b618c4 Body: 14b } 2025-10-24T16:28:49.938811Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 14, body-size# 14 2025-10-24T16:28:49.938853Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-10-24T16:28:49.950340Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:49.950365Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:49.950375Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-10-24T16:28:49.950458Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 102 TxId: 103 2025-10-24T16:28:49.950466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 103:0, target opId# 102:0 2025-10-24T16:28:49.953281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-10-24T16:28:49.953336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.953346Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:49.953356Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 133 2025-10-24T16:28:49.953867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.953884Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 102:0 ProgressState at tablet72057594046678944 2025-10-24T16:28:49.953890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.954243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:28:49.954340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:28:49.954349Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:28:49.954364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:28:49.954368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:28:49.954451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:28:49.954457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:28:49.954463Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:28:49.954484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:28:49.954495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:49.954500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:439:2407] TestWaitNotification: OK eventTxId 103 2025-10-24T16:28:49.965464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:49.965488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:49.965508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:49.965519Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:49.965531Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:49.965537Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.965541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:49.965546Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 133 -> 240 2025-10-24T16:28:49.965583Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:49.966037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.966105Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.966112Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:49.966122Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:49.966125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:49.966129Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:49.966131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:49.966134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:28:49.966144Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:28:49.966149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:49.966154Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:49.966157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:49.966175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:49.966492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:49.966501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:439:2407] TestWaitNotification: OK eventTxId 102 >> TImportTests::NoACLOption >> TRestoreTests::CancelUponProposeResultShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelUponProposeResultShouldSucceed[Zstd] >> TRestoreTests::ShouldSucceedOnSmallBuffer [GOOD] >> TRestoreTests::ShouldSucceedOnSupportedDatatypes >> TImportTests::ShouldSucceedOnIndexedTable3 [GOOD] >> THealthCheckTest::CLusterNotBootstrapped [GOOD] >> THealthCheckTest::BridgeTimeDifference >> TImportTests::NoACLOption [GOOD] >> TImportTests::ShouldBlockMerge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldSucceedOnIndexedTable3 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:48.885203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:48.885241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:48.885248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:48.885259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:48.885269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:48.885274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:48.885286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:48.885304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:48.885461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:48.885547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:48.899950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:48.899980Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:48.902771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:48.902801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:48.902825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:48.904676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:48.904946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:48.905079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:48.905155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:48.906548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:48.906607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:48.906866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:48.906875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:48.906918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:48.906925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:48.906931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:48.906962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.908503Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 
2025-10-24T16:28:48.929892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:48.930012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.930100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:48.930109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:48.930185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:48.930202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:48.931288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:48.931348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:48.931435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.931457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:48.931464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:48.931469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:48.931904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.931920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:48.931930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:48.932651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.932664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:48.932670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:48.932679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:48.933255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:48.933739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:48.933814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:48.934075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:48.934166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:48.934172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:48.934211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:48.934221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:48.934660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:48.934671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
vel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:50.787055Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710759 Unlocking 2025-10-24T16:28:50.787078Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710759 Unlocking TBuildInfo{ IndexBuildId: 281474976710759, Uid: 101-0-0, DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: by_value_1, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:140:2161], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 1 UploadBytes: 18 ReadRows: 1 ReadBytes: 18 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:28:50.787269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-10-24T16:28:50.787294Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000007 2025-10-24T16:28:50.787355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-10-24T16:28:50.787359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-10-24T16:28:50.787364Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-10-24T16:28:50.787402Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.787416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:50.787423Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock 
TPropose opId# 281474976710763:0 HandleReply TEvOperationPlan: step# 5000007 2025-10-24T16:28:50.787427Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 128 -> 240 2025-10-24T16:28:50.787767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.787780Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-10-24T16:28:50.787795Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:50.787802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:50.787808Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:50.787811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:50.787815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:28:50.787823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:28:50.787828Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:50.787832Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:28:50.787835Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710763:0 2025-10-24T16:28:50.787843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-10-24T16:28:50.788369Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:28:50.788384Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:28:50.788395Z node 3 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2470: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 281474976710759, txId# 281474976710763 2025-10-24T16:28:50.788416Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2473: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 281474976710759, Uid: 101-0-0, DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: by_value_1, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:140:2161], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710760, LockTxStatus: StatusAccepted, 
LockTxDone: 1, InitiateTxId: 281474976710761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 1 UploadBytes: 18 ReadRows: 1 ReadBytes: 18 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710763 2025-10-24T16:28:50.789334Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710759 Unlocking 2025-10-24T16:28:50.789362Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710759 Unlocking TBuildInfo{ IndexBuildId: 281474976710759, Uid: 101-0-0, DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: by_value_1, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:140:2161], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 1 UploadBytes: 18 ReadRows: 1 ReadBytes: 18 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:28:50.789372Z node 3 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-10-24T16:28:50.789839Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710759 Done 2025-10-24T16:28:50.789877Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710759 Done TBuildInfo{ IndexBuildId: 281474976710759, Uid: 101-0-0, DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: by_value_1, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:140:2161], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000005, ApplyTxId: 281474976710762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 1 UploadBytes: 18 ReadRows: 1 
ReadBytes: 18 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:28:50.789884Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710759, subscribers count# 1 2025-10-24T16:28:50.789913Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-10-24T16:28:50.789920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710759 2025-10-24T16:28:50.789931Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:50.789937Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-10-24T16:28:50.790477Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:50.790502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:28:50.790510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2280] TestWaitNotification: OK eventTxId 101 >> TRestoreTests::CancelUponProposeResultShouldSucceed[Zstd] [GOOD] >> TRestoreTests::CancelHungOperationShouldSucceed[Zstd] >> TRestoreTests::ShouldSucceedOnSupportedDatatypes [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [GOOD] >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit >> TImportWithRebootsTests::ShouldSucceedOnSingleChangefeed >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnSupportedDatatypes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:49.932963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:49.932995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:49.933001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:49.933011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:49.933019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-10-24T16:28:49.933024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:49.933034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:49.933061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:49.933207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:49.933280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:49.951062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:49.951087Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:49.954506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:49.954540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:49.954570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:49.955927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:49.956129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:49.956246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:49.956313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:49.957287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:49.957334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:49.957603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:49.957613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:49.957655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:49.957664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:49.957671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:49.957699Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.959139Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:49.981293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:49.981384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.981453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:49.981462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:49.981530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:49.981545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:49.982232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:49.982275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:49.982336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.982345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:49.982351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:49.982356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:49.982665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.982674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:49.982679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for 
txid 1:0 3 -> 128 2025-10-24T16:28:49.982935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.982942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:49.982948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:49.982955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:49.983655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:49.984072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:49.984123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:49.984352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:49.984376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:49.984384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:49.984452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:49.984459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:49.984508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:49.984520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:49.984916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:49.984924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme ... ORE DEBUG: import_s3.cpp:527: [Import] [s3:102] AllocateResource 2025-10-24T16:28:51.026595Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:544: [Import] [s3:102] Handle TEvResourceBroker::TEvResourceAllocated { TaskId: 1 } 2025-10-24T16:28:51.026602Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:552: [Import] [s3:102] Restart: attempt# 0 2025-10-24T16:28:51.030520Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-10-24T16:28:51.031522Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:51.031537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:28:51.031657Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:51.031665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:28:51.031767Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.031778Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:27274 Accept: */* Connection: 2025-10-24T16:28:51.032006Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 2025-10-24T16:28:51.032035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:51.032057Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A55EC5EE-E739-44B4-85E3-01D3F9AAF585 amz-sdk-request: attempt=1 2025-10-24T16:28:51.032065Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 content-type: application/xml 2025-10-24T16:28:51.032078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 375 2025-10-24T16:28:51.032109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 
2025-10-24T16:28:51.032706Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 35d3d7899d70c742e71e02584eabd1de ContentLength: 375 } } 2025-10-24T16:28:51.033261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:28:51.044308Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:51.060366Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:51.060407Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:51.060427Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv, range# 0-374 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:27274 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D4128C57-E49F-4F0B-A366-C048345E6B68 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-374 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 375 2025-10-24T16:28:51.061512Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 35d3d7899d70c742e71e02584eabd1de Body: 375b } 2025-10-24T16:28:51.061536Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 375, body-size# 375 2025-10-24T16:28:51.061712Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 405 2025-10-24T16:28:51.076405Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 375 WrittenBytes: 239 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:51.076431Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 35d3d7899d70c742e71e02584eabd1de ProcessedBytes: 375 WrittenBytes: 239 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:51.076441Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 239, writtenRows# 1 2025-10-24T16:28:51.095783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
OpResult { Success: true Explain: "" BytesProcessed: 239 RowsProcessed: 1 } 2025-10-24T16:28:51.095830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:51.095899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 239 RowsProcessed: 1 } 2025-10-24T16:28:51.095918Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 239 RowsProcessed: 1 } 2025-10-24T16:28:51.095942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:51.095947Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.095953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:51.095962Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:28:51.096034Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:51.096906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.096985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.096996Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:51.097024Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:51.097029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:51.097036Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:51.097040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:51.097045Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:28:51.097070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:28:51.097080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:51.097088Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:51.097092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:51.097135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:51.097818Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:51.097833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table >> TRestoreTests::CancelHungOperationShouldSucceed[Zstd] [GOOD] >> TImportTests::ShouldBlockMerge [GOOD] >> TImportTests::MultipleViewCreationRetries >> TImportTests::ShouldNotWriteBillRecordOnCommonDb ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 3422, MsgBus: 19621 2025-10-24T16:28:10.072976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825930064585345:2064];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:10.073255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:10.079106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001ee1/r3tmp/tmp7pqot2/pdisk_1.dat 2025-10-24T16:28:10.125889Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:10.126069Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825930064585322:2081] 1761323290072044 != 1761323290072047 TServer::EnableGrpc on GrpcPort 3422, node 1 2025-10-24T16:28:10.133089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:10.133099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:10.133105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:10.133163Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19621 TClient is connected to server localhost:19621 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-10-24T16:28:10.185010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:10.185040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:10.186064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: 2025-10-24T16:28:10.187099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:10.193935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:10.196324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:10.500405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825930064585997:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.500431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825930064586008:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.500442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.501350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:10.503334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825930064586012:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.503446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:10.503524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-10-24T16:28:10.503540Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825930064586011:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:28:10.597098Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825930064586065:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:10.732617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-10-24T16:28:10.771299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:10.771340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:10.771366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-10-24T16:28:10.771382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-10-24T16:28:10.771394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-10-24T16:28:10.771408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-10-24T16:28:10.771422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-10-24T16:28:10.771434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-10-24T16:28:10.771449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-10-24T16:28:10.771464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-10-24T16:28:10.771476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-10-24T16:28:10.771488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-10-24T16:28:10.771500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7564825930064586324:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-10-24T16:28:10.775441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825930064586322:2347];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-10-24T16:28:10.775475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825930064586322:2347];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-10-24T16:28:10.775513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7564825930064586322:2347];tablet_id=72075186224037897;process=TTxIn ... 
nternal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.856586Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.856900Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.856909Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.856914Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.857242Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.857248Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.857250Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.857993Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858008Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858016Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858406Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858415Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858417Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858829Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858837Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.858840Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.859880Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.859895Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.859898Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.860063Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.860072Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.860074Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.861250Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.861268Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.861271Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.861328Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.861339Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.861342Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.862439Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.862453Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.862455Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.862831Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.862840Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.862842Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.863527Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.863536Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.863539Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.864139Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.864148Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.864150Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.864606Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.864614Z node 2 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.864616Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.881312Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.881345Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:45.881348Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=63;result=not_found; 2025-10-24T16:28:46.080588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:46.080606Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelHungOperationShouldSucceed[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:50.380843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:50.380873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:50.380880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:50.380889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:50.380897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:50.380902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:50.380913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-10-24T16:28:50.380980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:50.381134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:50.381211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:50.399256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:50.399284Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:50.402550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:50.402665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:50.402696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:50.407627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:50.407724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:50.407845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.407948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:50.408662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:50.408707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:50.408991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:50.409005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:50.409033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:50.409042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:50.409048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:50.409067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.410389Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:28:50.433457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:50.433537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.433591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:50.433599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:50.433664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:50.433680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:50.434395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.434438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:50.434483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.434491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:50.434495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:50.434499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:50.434936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.434951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:50.434958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:50.435359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.435371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-10-24T16:28:50.435377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:50.435385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:50.436151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:50.436597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:50.436657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:50.436892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.436921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:50.436928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:50.436995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:50.437003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:50.437043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:50.437055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:50.437478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:50.437488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
10-24T16:28:51.488393Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 133 2025-10-24T16:28:51.488567Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:51.488575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:28:51.488807Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:51.488822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:51.488827Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:28:51.488833Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:28:51.488840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:28:51.488861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:28:51.489368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.489383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 102:0 ProgressState at tablet72057594046678944 2025-10-24T16:28:51.489390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.489523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:28:51.489786Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:28:51.491832Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:28:51.491870Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:28:51.491892Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:28:51.491896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:28:51.492000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:28:51.492008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:28:51.492013Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:28:51.492046Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:28:51.492057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:51.492063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:432:2401] TestWaitNotification: OK eventTxId 103 2025-10-24T16:28:51.513739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6705: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 7155 } } 2025-10-24T16:28:51.513775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:51.513817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 7155 } } 2025-10-24T16:28:51.513837Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000003:102] while waiting to finish at 72075186233409546" } TxId: 102 ExecLatency: 5 ProposeLatency: 6 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 7155 } } FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:28:51.514093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:51.514102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 
2025-10-24T16:28:51.514121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:51.514137Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:51.514159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:51.514166Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.514172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:51.514181Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 133 -> 240 2025-10-24T16:28:51.514235Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:51.515266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.515415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.515467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:51.515477Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:51.515499Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:51.515506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:51.515512Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:51.515516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:51.515522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 
2025-10-24T16:28:51.515542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:341:2318] message: TxId: 102 2025-10-24T16:28:51.515551Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:51.515558Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:51.515563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:51.515618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:51.517118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:51.517136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:432:2401] TestWaitNotification: OK eventTxId 102 >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TxUsage::WriteToTopic_Demo_24_Query [GOOD] >> TImportTests::ShouldNotWriteBillRecordOnCommonDb [GOOD] >> TImportTests::ShouldRestoreAnyAzReadReplicas >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Raw] >> TxUsage::WriteToTopic_Demo_27_Table >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile >> TImportTests::ShouldRestoreAnyAzReadReplicas [GOOD] >> TImportTests::ShouldFailOnOutboundKey >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] >> TRestoreTests::ShouldFailOnInvalidValue[Raw] >> TxUsage::WriteToTopic_Demo_43_Query [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> THealthCheckTest::TestStateStorageBlue [GOOD] >> THealthCheckTest::TestStateStorageYellow >> TRestoreTests::ExportImportPg >> TxUsage::WriteToTopic_Demo_45_Table >> TImportTests::ShouldFailOnOutboundKey [GOOD] >> TImportTests::MultipleViewCreationRetries [GOOD] >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> TRestoreTests::ShouldFailOnInvalidValue[Raw] [GOOD] >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Raw] >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 27819, MsgBus: 25932 2025-10-24T16:28:31.790838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:31.790897Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826022466747240:2259];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:31.790934Z 
node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f92/r3tmp/tmpz6i74S/pdisk_1.dat 2025-10-24T16:28:31.816576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:31.816607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:31.817777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:31.827052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826022466747008:2081] 1761323311785136 != 1761323311785139 2025-10-24T16:28:31.838464Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27819, node 1 2025-10-24T16:28:31.846999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:31.847016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:31.847018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:31.847071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:31.865800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25932 TClient is connected to server localhost:25932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:31.949512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:31.950879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:31.951227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:31.951548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2025-10-24T16:28:31.957962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323312007, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:31.958276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710657:0 2025-10-24T16:28:31.958295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-10-24T16:28:31.958341Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826022466747531:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 3 2025-10-24T16:28:31.958448Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826022466746976:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.958490Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826022466746979:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.958505Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826022466746982:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.958554Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826022466747573:2265][/Root] Path was updated to new version: owner# [1:7564826022466747277:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.958638Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826022466747531:2251] Ack update: ack to# [1:7564826022466747430:2195], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:31.958681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication 
complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-10-24T16:28:31.959374Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826022466747549:2287][/Root] Path was updated to new version: owner# [1:7564826022466747543:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.959437Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826022466747550:2288][/Root] Path was updated to new version: owner# [1:7564826022466747544:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.244500Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:32.245025Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026761714940:2298][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7564826022466747277:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1024 16:28:32.250624198 373383 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.250701299 373383 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.250068Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f92/r3tmp/spilling-tmp-runner/node_1_85f9bc6a-80fe375f-15479089-419ebef4, actor: [1:7564826026761714948:2304] 2025-10-24T16:28:32.250190Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f92/r3tmp/spilling-tmp-runner 2025-10-24T16:28:32.251620Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsrvj6k3zr1esy0acev14", Request has 18444982750397.300005s seconds to be completed 2025-10-24T16:28:32.252299Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsrvj6k3zr1esy0acev14", Created new session, sessionId: ydb://session/3?node_id=1&id=NWI5NjI1ZGQtZDYyNDE3OTUtODRmMTRhZS1hNDgyNTQwOA==, workerId: [1:7564826026761714967:2322], database: /Root, longSession: 1, local sessions count: 1 E1024 16:28:32.252495095 373383 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.252524958 373383 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.252344Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsrvj6k3zr1esy0acev14 2025-10-24T16:28:32.252362Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 
2025-10-24T16:28:32.252367Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-10-24T16:28:32.252376Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:32.252805Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026761714969:2300][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7564826022466747277:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.252880Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026761714970:2301][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7564826022466747277:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1024 16:28:32.255205100 373382 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.255240594 373382 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:32.255606636 373382 dns_resolver_ares.cc:452] no server name suppli ... GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { 
item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:52.926621Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826112767821988:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtd945b67nyxmwpdtsp86, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzJjNjgyMTgtZjhhYjU0NDAtYzU0MDM2ZjEtOGJlMjVkMGY=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { 
typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:52.927972Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtd945b67nyxmwpdtsp86, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzJjNjgyMTgtZjhhYjU0NDAtYzU0MDM2ZjEtOGJlMjVkMGY=, PoolId: default}. Compute actor has finished execution: [9:7564826112767821992:2614] 2025-10-24T16:28:52.928042Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtd945b67nyxmwpdtsp86, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzJjNjgyMTgtZjhhYjU0NDAtYzU0MDM2ZjEtOGJlMjVkMGY=, PoolId: default}. Compute actor has finished execution: [9:7564826112767821993:2615] 2025-10-24T16:28:52.928205Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtd945b67nyxmwpdtsp86", Forwarded response to sender actor, requestId: 34, sender: [9:7564826112767821968:2609], selfId: [9:7564826108472852963:2197], source: [9:7564826112767821970:2610] 2025-10-24T16:28:52.928415Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=MzJjNjgyMTgtZjhhYjU0NDAtYzU0MDM2ZjEtOGJlMjVkMGY=, workerId: [9:7564826112767821970:2610], local sessions count: 0 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query [GOOD] >> THealthCheckTest::BridgeTimeDifference [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldFailOnOutboundKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:52.021820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:52.021849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:52.021855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:52.021866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:52.021874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:52.021879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:52.021889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:52.021918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:52.022059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:52.022133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:52.037624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:52.037654Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:52.041011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:52.041045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:52.041079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:52.042940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:52.043166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:52.043288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:52.043380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:52.045544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:52.045600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:52.045901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:52.045914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:52.045956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:52.045966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:52.045972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:52.046010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:52.047627Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:52.069500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:52.069606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:52.069690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:52.069699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:52.069767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:52.069785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:52.070783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:52.070853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:52.070933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:52.070945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:52.070952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:52.070958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:52.071472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:52.071489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:52.071496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:52.071955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:52.071972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:52.071979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState 
leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:52.071987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:52.072517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:52.072942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:52.072994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:52.073235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:52.073259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:52.073265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:52.073320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:52.073324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:52.073366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:52.073376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:52.073833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:52.073843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
HTTP/1.1 HEADERS: Host: localhost:25034 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4CAD77CD-AC18-4197-8FC2-792B47F9EE9A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-10-24T16:28:53.247957Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710758] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a3ed28bfb53c9214f635c51ed6b618c4 Body: 14b } 2025-10-24T16:28:53.247978Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710758] Content size: processed-bytes# 0, content-length# 14, body-size# 14 2025-10-24T16:28:53.248016Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710758] Upload rows: count# 1, size# 34 2025-10-24T16:28:53.249047Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710758] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 112 Data: 573661474 Data: 1635131948 Data: 828732780 Data: 2594 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 14 MdLen: 32 } DownloadState: } } 2025-10-24T16:28:53.249078Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710758] Process download info at 'UploadResponse': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 14 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 112 Data: 573661474 Data: 1635131948 Data: 828732780 Data: 2594 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 14 MdLen: 32 } DownloadState: } 2025-10-24T16:28:53.249092Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710758] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-10-24T16:28:53.252961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 356 RawX2: 12884904212 } Origin: 72075186233409547 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: false Explain: "Key is out of range on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:53.252986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710758, tablet: 72075186233409547, partId: 0 2025-10-24T16:28:53.253018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Source { RawX1: 356 RawX2: 12884904212 } Origin: 72075186233409547 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: false Explain: "Key is out of range on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:53.253037Z node 3 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710758:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 356 RawX2: 12884904212 } Origin: 72075186233409547 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: false Explain: "Key is out of range on line: \"a1\",\"value1\"" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:53.253055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710758:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.253092Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710758:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:53.254108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 12884904209 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:53.265156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710758, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:53.265184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 12884904209 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:53.265199Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710758:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 351 RawX2: 12884904209 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:53.265215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710758:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265220Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, 
operation: 281474976710758:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710758:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265237Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710758:0 129 -> 240 2025-10-24T16:28:53.265287Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710758:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:53.265813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.265943Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2025-10-24T16:28:53.265958Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:28:53.265963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:28:53.265967Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:28:53.265974Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:28:53.265978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2025-10-24T16:28:53.265994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710758 2025-10-24T16:28:53.266000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:28:53.266006Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-10-24T16:28:53.266010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710758:0 2025-10-24T16:28:53.266039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:28:53.266651Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-10-24T16:28:53.266676Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710758 2025-10-24T16:28:53.266688Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 
2025-10-24T16:28:53.266694Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-10-24T16:28:53.266737Z node 3 :IMPORT NOTICE: schemeshard_import__create.cpp:764: TImport::TTxProgress: issues during restore, cancelling, info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Table' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Transferring SubState: Subscribed WaitTxId: 0 SrcPath: SrcPrefix: Issue: 'shard: 72057594046678944:2, error: Key is out of range on line: "a1","value1"' } 2025-10-24T16:28:53.267233Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:53.267261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:28:53.267268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2280] TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::MultipleViewCreationRetries [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:50.738941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:50.738967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:50.738974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:50.738983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:50.738991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:50.738996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:50.739007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:50.739034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:50.739180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:50.739252Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:50.756614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:50.756647Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:50.760300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:50.760344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:50.760392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:50.762048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:50.762273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:50.762380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.762442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:50.763455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:50.763495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:50.763760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:50.763771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:50.763812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:50.763821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:50.763827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:50.763875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.765317Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:50.787983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:50.788052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.788102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:50.788110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:50.788182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:50.788197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:50.788791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.788832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:50.788885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.788896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:50.788902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:50.788908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:50.789364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.789378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:50.789386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:50.789703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.789713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:50.789719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:50.789725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:50.790383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:50.790822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:50.790874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:50.791080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:50.791103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:50.791111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:50.791199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:50.791206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:50.791256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:50.791269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:50.791710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:50.791719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
Id: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "view5" PathId { OwnerId: 72057594046678944 LocalId: 7 } Version: 1 QueryText: "SELECT * FROM `view4`" CapturedContext { PathPrefix: "/MyRoot" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241067Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/view6" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241082Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/view6" took 16us result status StatusSuccess 2025-10-24T16:28:53.241130Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/view6" PathDescription { Self { Name: "view6" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 281474976710763 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 10 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "view6" PathId { OwnerId: 72057594046678944 LocalId: 8 } Version: 1 QueryText: "SELECT * FROM `view5`" CapturedContext { PathPrefix: "/MyRoot" SyntaxVersion: 1 
AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241185Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/view7" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241200Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/view7" took 16us result status StatusSuccess 2025-10-24T16:28:53.241252Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/view7" PathDescription { Self { Name: "view7" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 281474976710764 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 10 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "view7" PathId { OwnerId: 72057594046678944 LocalId: 9 } Version: 1 QueryText: "SELECT * FROM `view6`" CapturedContext { PathPrefix: "/MyRoot" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241308Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/view8" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241322Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/view8" took 15us result status 
StatusSuccess 2025-10-24T16:28:53.241372Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/view8" PathDescription { Self { Name: "view8" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 281474976710765 CreateStep: 5000010 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 10 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "view8" PathId { OwnerId: 72057594046678944 LocalId: 10 } Version: 1 QueryText: "SELECT * FROM `view7`" CapturedContext { PathPrefix: "/MyRoot" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241431Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/view9" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:53.241443Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/view9" took 14us result status StatusSuccess 2025-10-24T16:28:53.241491Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/view9" PathDescription { Self { Name: "view9" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 281474976710766 CreateStep: 5000011 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 10 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "view9" PathId { OwnerId: 72057594046678944 LocalId: 11 } Version: 1 QueryText: "SELECT * FROM `view8`" CapturedContext { PathPrefix: "/MyRoot" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } PathId: 11 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> TRestoreTests::ExportImportPg [GOOD] >> TRestoreTests::ExportImportUuid >> TRestoreTests::ShouldFailOnInvalidValue[Zstd] [GOOD] >> TRestoreTests::ShouldFailOnInvalidFrame >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeTimeDifference [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001879/r3tmp/tmpcfg0MA/pdisk_1.dat 2025-10-24T16:28:36.249482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.249521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.250610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:36.250685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.251301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:36.277307Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:36.277487Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826041943608169:2081] 1761323316164869 != 1761323316164872 TServer::EnableGrpc on GrpcPort 29019, node 1 2025-10-24T16:28:36.298181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-10-24T16:28:36.298198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:36.298201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:36.298261Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:36.384707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:36.388475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:36.472519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions issue_log { id: "0" status: GREY message: "Database does not exist" } location { id: 1 host: "::1" port: 12001 } 2025-10-24T16:28:38.149813Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.149943Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.153431Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.153837Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.154409Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:494:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.154521Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.154570Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:38.155143Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:486:2165], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.155219Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.155263Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001879/r3tmp/tmp2IUEyT/pdisk_1.dat 2025-10-24T16:28:38.294157Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:38.340654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.340702Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.340840Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.340856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.387549Z node 2 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-10-24T16:28:38.387749Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:38.387907Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2627, node 2 TClient is connected to server localhost:26276 2025-10-24T16:28:38.519230Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:38.519246Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:38.519250Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:38.519381Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 2 host: "::1" port: 12001 pile { name: "pile0" } } 2025-10-24T16:28:40.011776Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.011840Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.021543Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:40.022103Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:40.022352Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-10-24T16:28:40.022366Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:40.022417Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:40.022731Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:679:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:40.022885Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:40.022902Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001879/r3tmp/tmpYXtJhQ/pdisk_1.dat 2025-10-24T16:28:40.150200Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:40.194503Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:40.194549Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:40.194727Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:40.194741Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:40.235239Z node 4 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-10-24T16:28:40.235468Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:40.235610Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9638, node 4 TClient is connected to server localhost:26000 2025-10-24T16:28:40.364527Z node 4 :NET_CLASSIFIER ... 
E WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:50.505267Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21069, node 11 TClient is connected to server localhost:29320 2025-10-24T16:28:50.584945Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:50.584965Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:50.584969Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:50.585102Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "0" status: RED message: "Cluster is not bootstrapped" } location { id: 11 } TServer::EnableGrpc on GrpcPort 27328, node 13 TClient is connected to server localhost:11894 2025-10-24T16:28:52.772942Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:52.773099Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:52.773515Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:52.773689Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:52.786897Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:572: SchemeBoardDelete /Root Strong=0 2025-10-24T16:28:52.786916Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:572: SchemeBoardDelete /Root Strong=0 2025-10-24T16:28:52.787664Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:1210:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:52.787976Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788113Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788694Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:1212:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:52.788788Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788815Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:739:2317], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:52.788836Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [16:743:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:52.788906Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788946Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788955Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788986Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:52.788991Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:52.877639Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:52.877933Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:52.877973Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:52.878619Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:52.878636Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:52.878641Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:52.878826Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:52.879403Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [13:37:2084] 1761323331043803 != 1761323331043808 2025-10-24T16:28:52.880813Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:52.880845Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:52.882846Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:52.882879Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:52.884762Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:52.884801Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:52.885058Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [14:76:2075] 1761323331045572 
!= 1761323331045576 2025-10-24T16:28:52.886453Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [16:154:2075] 1761323331047150 != 1761323331047153 2025-10-24T16:28:52.886513Z node 15 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [15:115:2075] 1761323331046448 != 1761323331046452 2025-10-24T16:28:52.972501Z node 13 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2025-10-24T16:28:52.972756Z node 13 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-10-24T16:28:52.972819Z node 13 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2025-10-24T16:28:52.972907Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:52.973045Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:52.973068Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:52.973107Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12001 } pile { name: "pile0" } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 5 } issue_log { id: "YELLOW-c616-1231c6b1-13" status: YELLOW message: "Clock skew above recommended limit" location { compute { node { id: 13 host: "::1" port: 12001 } pile { name: "pile0" } } database { name: "/Root" } } type: "CLOCK_SKEW" level: 5 } issue_log { id: "YELLOW-7932-1231c6b1-14" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 14 host: "::1" port: 12002 } pile { name: "pile0" } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 5 } issue_log { id: "YELLOW-e2b0-1231c6b1" status: YELLOW message: "Compute is overloaded" location { compute { pile { name: "pile0" } } database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-13" reason: "YELLOW-7932-1231c6b1-14" type: "BRIDGE_PILE" level: 3 } issue_log { id: "YELLOW-f475-1231c6b1" status: YELLOW message: "Clock skew issues" location { compute { pile { name: "pile0" } } database { name: "/Root" } } reason: "YELLOW-c616-1231c6b1-13" type: "BRIDGE_PILE" level: 3 } issue_log { id: "YELLOW-7932-1231c6b1-15" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 15 host: "::1" port: 12003 } pile { name: "pile1" } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 5 } issue_log { id: "YELLOW-7932-1231c6b1-16" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 16 host: "::1" port: 12004 } pile { name: "pile1" } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 5 } issue_log { id: "YELLOW-2a30-1231c6b1" status: YELLOW message: "There are compute issues" location { database { name: "/Root" } } reason: "YELLOW-e2b0-1231c6b1" reason: "YELLOW-f475-1231c6b1" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-c616-1231c6b1" status: YELLOW message: "Clock skew above recommended limit" location { database { name: "/Root" } } type: "CLOCK_SKEW" level: 3 } issue_log { 
id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-2a30-1231c6b1" type: "DATABASE" level: 1 } database_status { name: "/Root" overall: YELLOW storage { overall: GREEN pools { id: "static" overall: GREEN groups { id: "0" overall: GREEN vdisks { id: "13-1-0" overall: GREEN } } } } compute { overall: YELLOW nodes { id: "13" overall: YELLOW load { overall: YELLOW load: 145.446777 cores: 64 } clock_skew { overall: YELLOW clock_skew: -15 } } nodes { id: "14" overall: YELLOW load { overall: YELLOW load: 145.446777 cores: 64 } } nodes { id: "15" overall: YELLOW load { overall: YELLOW load: 145.446777 cores: 64 } } nodes { id: "16" overall: YELLOW load { overall: YELLOW load: 145.446777 cores: 64 } } clock_skew { overall: YELLOW clock_skew: 15 } } } location { id: 13 host: "::1" port: 12001 pile { name: "pile0" } } >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] >> TRestoreTests::ShouldFailOnInvalidFrame [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-10-24T16:28:03.563469Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:03.563524Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:03.576053Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:03.576112Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:03.600399Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:03.600586Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2162], cookie=14664058553968588116, session=0, seqNo=0) 2025-10-24T16:28:03.600636Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:03.612243Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2162], cookie=14664058553968588116, session=1) 2025-10-24T16:28:03.612450Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=111, session=1, semaphore="Lock1" count=1) 2025-10-24T16:28:03.612503Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:03.612519Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:03.623459Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=111) 2025-10-24T16:28:03.623571Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2162], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:03.636213Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2162], cookie=222) 2025-10-24T16:28:03.636388Z node 1 
:KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=15578715368214530207, name="Lock1") 2025-10-24T16:28:03.636422Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=15578715368214530207) 2025-10-24T16:28:03.814701Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:03.814749Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:03.818598Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:03.818686Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:03.850758Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:03.851041Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=4068461978595975237, session=0, seqNo=0) 2025-10-24T16:28:03.851082Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:03.862139Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=4068461978595975237, session=1) 2025-10-24T16:28:03.862268Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2163], cookie=12792030633482078900, session=0, seqNo=0) 2025-10-24T16:28:03.862315Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-10-24T16:28:03.873403Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2163], cookie=12792030633482078900, session=2) 2025-10-24T16:28:03.873703Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-10-24T16:28:03.873760Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-10-24T16:28:03.873781Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-10-24T16:28:03.886422Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=111) 2025-10-24T16:28:03.886594Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=112, session=1, semaphore="Lock2" count=1) 2025-10-24T16:28:03.886660Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-10-24T16:28:03.886682Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-10-24T16:28:03.897843Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=112) 2025-10-24T16:28:03.897989Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=222, session=2, semaphore="Lock1" count=1) 
2025-10-24T16:28:03.898069Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-10-24T16:28:03.912542Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=222) 2025-10-24T16:28:03.912595Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=223) 2025-10-24T16:28:03.912701Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=333, session=2, semaphore="Lock1" count=1) 2025-10-24T16:28:03.912834Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2163], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-10-24T16:28:03.924288Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=333) 2025-10-24T16:28:03.924343Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2163], cookie=334) 2025-10-24T16:28:04.352182Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:04.372408Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:04.730416Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:04.743438Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:05.088596Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:05.099587Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:05.449860Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:05.464341Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:05.835749Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:05.860296Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:06.201557Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:06.212759Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:06.558816Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:06.569895Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:06.919382Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:06.930232Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:07.298614Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:07.309668Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:07.696998Z node 2 :KESUS_TABLET 
DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:07.707878Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.073374Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.084266Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.462393Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.481988Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:08.863124Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:08.880249Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:09.256352Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:09.267176Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:09.656117Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:09.670779Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:10.052117Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:10.068698Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:10.468572Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:10.481379Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:10.892020Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:10.903835Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:11.284026Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:11.296934Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:11.691789Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:11.704436Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:12.114503Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [7205759403 ... 
2025-10-24T16:28:47.636275Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:48.035634Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:48.047177Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:48.406356Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:48.417347Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:48.791469Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:48.803051Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:49.157067Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:49.168289Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:49.519386Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:49.530330Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:49.935511Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:49.946470Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:50.310575Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:50.321637Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:50.680512Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:50.691486Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:51.028294Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:51.039875Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:51.380443Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:51.391573Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:51.760307Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:51.772502Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:52.121041Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:52.132055Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:52.476681Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:52.487861Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:52.829427Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:52.840491Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:53.187213Z node 4 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-10-24T16:28:53.198142Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-10-24T16:28:53.662213Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-10-24T16:28:53.662247Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-10-24T16:28:53.673355Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-10-24T16:28:53.693884Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:599:2536], cookie=8198561305252161849) 2025-10-24T16:28:53.693929Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:599:2536], cookie=8198561305252161849) 2025-10-24T16:28:53.694016Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:602:2539], cookie=4623326934649262902) 2025-10-24T16:28:53.694023Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:602:2539], cookie=4623326934649262902) 2025-10-24T16:28:53.694095Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:605:2542], cookie=2444087406532240700, name="Lock1") 2025-10-24T16:28:53.694109Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:605:2542], cookie=2444087406532240700) 2025-10-24T16:28:53.694176Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:608:2545], cookie=11458263625208702882, name="Lock1") 2025-10-24T16:28:53.694182Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:608:2545], cookie=11458263625208702882) 2025-10-24T16:28:53.907464Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-10-24T16:28:53.907497Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-10-24T16:28:53.911699Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-10-24T16:28:53.911875Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-10-24T16:28:53.943607Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-10-24T16:28:53.943770Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=8681138232076168326, session=0, seqNo=0) 2025-10-24T16:28:53.943809Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-10-24T16:28:53.954623Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=8681138232076168326, session=1) 2025-10-24T16:28:53.954712Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=8487493769726469274, session=0, seqNo=0) 2025-10-24T16:28:53.954750Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: 
[72057594037927937] Created new session 2 2025-10-24T16:28:53.965519Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=8487493769726469274, session=2) 2025-10-24T16:28:53.965589Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=7261721001138225680, session=0, seqNo=0) 2025-10-24T16:28:53.965623Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-10-24T16:28:53.976570Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=7261721001138225680, session=3) 2025-10-24T16:28:53.976732Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2173], cookie=4242467830241155633, name="Sem1", limit=3) 2025-10-24T16:28:53.976773Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-10-24T16:28:53.987654Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2173], cookie=4242467830241155633) 2025-10-24T16:28:53.987760Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=111, session=1, semaphore="Sem1" count=2) 2025-10-24T16:28:53.987799Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-10-24T16:28:53.987845Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=222, session=2, semaphore="Sem1" count=2) 2025-10-24T16:28:53.987907Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=333, session=3, semaphore="Sem1" count=1) 2025-10-24T16:28:53.998766Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=111) 2025-10-24T16:28:53.998798Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=222) 2025-10-24T16:28:53.998805Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=333) 2025-10-24T16:28:53.998947Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2182], cookie=8610584593403294766, name="Sem1") 2025-10-24T16:28:53.998970Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2182], cookie=8610584593403294766) 2025-10-24T16:28:53.999031Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2185], cookie=7355266942594617231, name="Sem1") 2025-10-24T16:28:53.999038Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2185], cookie=7355266942594617231) 2025-10-24T16:28:53.999074Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=444, name="Sem1") 2025-10-24T16:28:53.999101Z node 5 
:KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-10-24T16:28:53.999113Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-10-24T16:28:53.999120Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-10-24T16:28:54.010000Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2162], cookie=444) 2025-10-24T16:28:54.010160Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:168:2190], cookie=17822703166443836338, name="Sem1") 2025-10-24T16:28:54.010184Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:168:2190], cookie=17822703166443836338) 2025-10-24T16:28:54.010259Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:171:2193], cookie=514944971793226192, name="Sem1") 2025-10-24T16:28:54.010270Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:171:2193], cookie=514944971793226192) >> THealthCheckTest::LayoutCorrect [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnInvalidFrame [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:53.122882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:53.122906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:53.122912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:53.122922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:53.122929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:53.122934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:53.122944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:53.122972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-10-24T16:28:53.123101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:53.123167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:53.139389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:53.139412Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:53.142937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:53.142971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:53.143003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:53.145021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:53.145209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:53.145320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.145374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:53.146465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:53.146505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:53.146732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:53.146740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:53.146783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:53.146791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:53.146797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:53.146820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.148322Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:53.168720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:53.168809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.168875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:53.168884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:53.168963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:53.168976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:53.169622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.169667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:53.169733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.169743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:53.169748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:53.169754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:53.170105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.170113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:53.170121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:53.170391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.170398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.170404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:53.170411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:53.171068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:53.171455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:53.171506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:53.171734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.171756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:53.171763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:53.171824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:53.171831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:53.171899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:53.171914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:53.172452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:53.172466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
mport] [s3:102] Handle TEvResourceBroker::TEvResourceAllocated { TaskId: 1 } 2025-10-24T16:28:54.151985Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:552: [Import] [s3:102] Restart: attempt# 0 2025-10-24T16:28:54.155116Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:2040 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BC2A440C-CD22-417F-9FF4-640914878878 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:28:54.160812Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. } 2025-10-24T16:28:54.160836Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv.zst FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-10-24T16:28:54.161102Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:54.161113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:28:54.161205Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:54.161214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:28:54.161311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.161323Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:54.161503Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:54.161519Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:54.161528Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:28:54.161535Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 REQUEST: HEAD /data_00.csv.zst HTTP/1.12025-10-24T16:28:54.161543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 HEADERS: Host: localhost:2040 
Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 71FD0362-3542-4CD2-95CD-7849F5B89444 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:28:54.161567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true S3_MOCK::HttpServeRead: /data_00.csv.zst / 13 2025-10-24T16:28:54.161910Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: a5511826537e4249478b686b99281952 ContentLength: 13 } } 2025-10-24T16:28:54.162400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:28:54.173169Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:54.184063Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a5511826537e4249478b686b99281952 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:54.184092Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: a5511826537e4249478b686b99281952 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:54.184115Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-12 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:2040 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 43CD250C-B4BC-4614-B26D-369446DC0693 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-12 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 13 2025-10-24T16:28:54.185064Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a5511826537e4249478b686b99281952 Body: 13b } 2025-10-24T16:28:54.185081Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 13, body-size# 13 2025-10-24T16:28:54.185094Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 0, error# Cannot process data: Unknown frame descriptor, writtenBytes# 0, writtenRows# 0 2025-10-24T16:28:54.197395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: Unknown frame descriptor" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:54.197422Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:54.197452Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: Unknown frame descriptor" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:54.197469Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: Unknown frame descriptor" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:54.197487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:54.197494Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.197500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:54.197509Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:28:54.197560Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:54.198151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.198215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.198223Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:54.198238Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:54.198244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:54.198250Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:54.198254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:54.198259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:28:54.198277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:28:54.198285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:54.198291Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:54.198296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:54.198328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:54.198887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:54.198900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> TRestoreTests::ExportImportUuid [GOOD] >> TRestoreTests::ExportImportWithChecksums[Raw] >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 10949, MsgBus: 22184 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f67/r3tmp/tmp4oOs4U/pdisk_1.dat 2025-10-24T16:28:31.822353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:31.822395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:31.832102Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:31.833055Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826019537403042:2081] 1761323311746106 != 1761323311746109 TServer::EnableGrpc on GrpcPort 10949, node 1 2025-10-24T16:28:31.849904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:31.849917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:31.849919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:31.849962Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:22184 2025-10-24T16:28:31.874969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:31.875001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:31.880229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:31.952419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:31.953667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:31.953996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:31.954234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:31.958199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323312007, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:31.958530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710657:0 2025-10-24T16:28:31.958539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-10-24T16:28:31.958573Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826019537403558:2244] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 3 2025-10-24T16:28:31.958615Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826019537403010:2049] Update description: path# /Root, pathId# 
[OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.958647Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826019537403013:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.958658Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826019537403016:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.958731Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826019537403510:2217][/Root] Path was updated to new version: owner# [1:7564826019537403295:2104], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.958775Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826019537403558:2244] Ack update: ack to# [1:7564826019537403401:2154], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:31.958805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-10-24T16:28:31.959080Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826019537403588:2288][/Root] Path was updated to new version: owner# [1:7564826019537403582:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.959117Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826019537403587:2287][/Root] Path was updated to new version: owner# [1:7564826019537403581:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.994043Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826019537403656:2296][/Root/.metadata/script_executions] Set up state: owner# [1:7564826019537403295:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.994148Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826019537403657:2297][/Root/.metadata/script_execution_leases] Set up state: owner# [1:7564826019537403295:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.994212Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: 
[main][1:7564826019537403658:2298][/Root/.metadata/result_sets] Set up state: owner# [1:7564826019537403295:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.994636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:31.994653Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found 2025-10-24T16:28:32.248440Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:32.252319681 373315 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.252392983 373315 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:32.253856186 373314 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.253896213 373314 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.249967Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f67/r3tmp/spilling-tmp-runner/node_1_6c997428-f59ddb85-c4da33e0-4761d647, actor: [1:7564826023832370976:2304] 2025-10-24T16:28:32.250067Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f67/r3tmp/spilling-tmp-runner 2025-10-24T16:28:32.250185Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826023832370987:2300][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7564826019537403295:2104], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.252613Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsrvf0b1kvcznhze8n9vn", Request has 18444982750397.299011s seconds to be completed 2025-10-24T16:28:32.253275Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsrvf0b1kvcznhze8n9vn", Created new session, sessionId: ydb://session/3?node_id=1&id=ZjI4ZDVkZGMtYjY5NzJkMjUtYTM3OGE4YWUtODg2NDMxM2Y=, workerId: [1:7564826023832371002:2321], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:32.253326Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsrvf0b1kvcznhze8n9vn 2025-10-24T16:28:32.253340Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:32.253344Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-10-24T16:28:32.253350Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:32.256133Z node 1 :SCHEME_BOA ... 
point" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:53.829835Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826114004040542:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgte4ze4vtv2nwy3b68vyz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZDIxNmUxYzgtMTE3NmY5MjctODRlY2JjMWEtYWE3NWM1YTc=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 2025-10-24T16:28:53.831308Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgte4ze4vtv2nwy3b68vyz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZDIxNmUxYzgtMTE3NmY5MjctODRlY2JjMWEtYWE3NWM1YTc=, PoolId: default}. 
Compute actor has finished execution: [9:7564826114004040546:2614] 2025-10-24T16:28:53.831368Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgte4ze4vtv2nwy3b68vyz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZDIxNmUxYzgtMTE3NmY5MjctODRlY2JjMWEtYWE3NWM1YTc=, PoolId: default}. Compute actor has finished execution: [9:7564826114004040547:2615] 2025-10-24T16:28:53.831578Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgte4ze4vtv2nwy3b68vyz", Forwarded response to sender actor, requestId: 34, sender: [9:7564826114004040523:2609], selfId: [9:7564826105414104184:2158], source: [9:7564826114004040524:2610] 2025-10-24T16:28:53.832247Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=ZDIxNmUxYzgtMTE3NmY5MjctODRlY2JjMWEtYWE3NWM1YTc=, workerId: [9:7564826114004040524:2610], local sessions count: 0 2025-10-24T16:28:53.914686Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444982750375.636939s seconds to be completed 2025-10-24T16:28:53.915219Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=9&id=YWE0NWI3ZjctYzFmOWE4OGYtZGZlYjJmMzktZDM0ZTlhZmE=, workerId: [9:7564826114004040555:2618], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:53.915252Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-10-24T16:28:53.915349Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=YWE0NWI3ZjctYzFmOWE4OGYtZGZlYjJmMzktZDM0ZTlhZmE=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 36, targetId: [9:7564826114004040555:2618] 2025-10-24T16:28:53.915358Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 36 timeout: 300.000000s actor id: [9:7564826114004040557:2913] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 14987, MsgBus: 9056 2025-10-24T16:28:31.642997Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826019439040948:2260];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:31.643177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f6e/r3tmp/tmprmcusg/pdisk_1.dat 2025-10-24T16:28:31.724032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:31.735438Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14987, node 1 2025-10-24T16:28:31.750921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:31.750932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:31.750934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:31.750985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:31.752178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:31.752197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:31.756224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9056 TClient is connected to server localhost:9056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:28:31.845461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:31.846962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:31.847277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:31.847593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:31.848282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323311895, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:31.848563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715657:0 2025-10-24T16:28:31.848573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-10-24T16:28:31.848614Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826019439041215:2239] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 3 2025-10-24T16:28:31.848709Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826019439040672:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.848754Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826019439040675:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.848764Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826019439040678:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:31.848857Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826019439041182:2219][/Root] Path was updated to new version: owner# [1:7564826019439040968:2110], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.848942Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826019439041215:2239] Ack update: ack to# [1:7564826019439041045:2143], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:31.848968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify 
& remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 2025-10-24T16:28:31.852082Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826019439041247:2288][/Root] Path was updated to new version: owner# [1:7564826019439041241:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.852161Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826019439041246:2287][/Root] Path was updated to new version: owner# [1:7564826019439041240:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.856210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:31.888283Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826019439041314:2292][/Root/.metadata/script_executions] Set up state: owner# [1:7564826019439040968:2110], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.888323Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826019439041315:2293][/Root/.metadata/script_execution_leases] Set up state: owner# [1:7564826019439040968:2110], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.888334Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826019439041316:2294][/Root/.metadata/result_sets] Set up state: owner# [1:7564826019439040968:2110], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:31.888460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:31.888466Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found 2025-10-24T16:28:32.140329Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:32.141259510 373268 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.141334682 373268 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.141911Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsrr00fnfp2n1m7f6epbr", Request has 18444982750397.409712s seconds to be completed 2025-10-24T16:28:32.142620Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsrr00fnfp2n1m7f6epbr", Created new 
session, sessionId: ydb://session/3?node_id=1&id=ZjVmZGI3MTQtYTJkNmI3ZDAtYzAzY2NiZTctZjI3ZTI1ZTM=, workerId: [1:7564826023734008649:2318], database: /Root, longSession: 1, local sessions count: 1 E1024 16:28:32.145497608 373269 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.145547685 373269 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.142677Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsrr00fnfp2n1m7f6epbr 2025-10-24T16:28:32.142702Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:32.142705Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-10-24T16:28:32.142712Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:32.145772Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f6e/r3tmp/spilling-tmp-runner/node_1_94dd20a1-53228667-806c3cfd-5e3ee4e8, actor: [1:7564826023734008632:2302] 2025-10-24T16:28:32.145910Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f6e/r3tmp/spilling-tmp-runner E1024 16:28:32.146663283 373269 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.146697298 373269 channel.cc:120] channel stack builder failed: ... item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E1024 16:28:54.043649515 388764 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:54.043690805 388764 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:54.044232Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:54.058913Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826120558915224:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtecdffznqz9ajpfgcjpm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZDBlOGYwY2MtMWYxZGU2NGQtYTQ0NzBkZTMtNzc0MmJkMTk=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:54.060087Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtecdffznqz9ajpfgcjpm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZDBlOGYwY2MtMWYxZGU2NGQtYTQ0NzBkZTMtNzc0MmJkMTk=, PoolId: default}. Compute actor has finished execution: [9:7564826120558915228:2614] 2025-10-24T16:28:54.060145Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtecdffznqz9ajpfgcjpm, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZDBlOGYwY2MtMWYxZGU2NGQtYTQ0NzBkZTMtNzc0MmJkMTk=, PoolId: default}. 
Compute actor has finished execution: [9:7564826120558915229:2615] 2025-10-24T16:28:54.060309Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtecdffznqz9ajpfgcjpm", Forwarded response to sender actor, requestId: 34, sender: [9:7564826120558915205:2609], selfId: [9:7564826111968978907:2199], source: [9:7564826120558915206:2610] 2025-10-24T16:28:54.060811Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=ZDBlOGYwY2MtMWYxZGU2NGQtYTQ0NzBkZTMtNzc0MmJkMTk=, workerId: [9:7564826120558915206:2610], local sessions count: 0 2025-10-24T16:28:54.195182Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444982750375.356445s seconds to be completed 2025-10-24T16:28:54.195953Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=9&id=ZjJhZTQ1ZmUtMzhlN2U3ODAtZjc4ODgxYTctMWU1MTI4MzY=, workerId: [9:7564826120558915237:2618], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:54.196052Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-10-24T16:28:54.196202Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZjJhZTQ1ZmUtMzhlN2U3ODAtZjc4ODgxYTctMWU1MTI4MzY=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 36, targetId: [9:7564826120558915237:2618] 2025-10-24T16:28:54.196213Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 36 timeout: 300.000000s actor id: [9:7564826120558915239:2913] >> TRestoreTests::ShouldSucceedOnSingleShardTable[Raw] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 9655, MsgBus: 23963 2025-10-24T16:28:32.174979Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826026487722068:2071];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:32.174995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f5e/r3tmp/tmpIJnal2/pdisk_1.dat 2025-10-24T16:28:32.232933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:32.240917Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:32.241175Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826026487722029:2081] 1761323312174628 != 1761323312174631 TServer::EnableGrpc on GrpcPort 9655, node 1 2025-10-24T16:28:32.260175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:32.260188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-10-24T16:28:32.260191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:32.260250Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23963 2025-10-24T16:28:32.288664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:32.288692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:32.289571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:28:32.408275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:32.409683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:32.410034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:32.410302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:32.411009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323312455, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:32.411229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715657:0 2025-10-24T16:28:32.411237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-10-24T16:28:32.411276Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826026487722546:2243] Update description: owner# 72057594046644480, pathId# [OwnerId: 
72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 3 2025-10-24T16:28:32.411366Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826026487721997:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.411406Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826026487722000:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.411414Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826026487722003:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.411458Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826026487722505:2225][/Root] Path was updated to new version: owner# [1:7564826026487722281:2106], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.411566Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826026487722546:2243] Ack update: ack to# [1:7564826026487722373:2144], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:32.411593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-10-24T16:28:32.411769Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826026487722573:2287][/Root] Path was updated to new version: owner# [1:7564826026487722567:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.411821Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826026487722574:2288][/Root] Path was updated to new version: owner# [1:7564826026487722568:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.464230Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026487722643:2294][/Root/.metadata/script_executions] Set up state: owner# [1:7564826026487722281:2106], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.464290Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026487722644:2295][/Root/.metadata/script_execution_leases] Set up state: 
owner# [1:7564826026487722281:2106], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.464303Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026487722645:2296][/Root/.metadata/result_sets] Set up state: owner# [1:7564826026487722281:2106], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.466597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:32.466614Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found E1024 16:28:32.670894286 373557 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.670974288 373557 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.670006Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:32.671181Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f5e/r3tmp/spilling-tmp-runner/node_1_87aa594d-ca2b1b51-cf840d94-65c036a6, actor: [1:7564826026487722665:2302] 2025-10-24T16:28:32.671273Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f5e/r3tmp/spilling-tmp-runner 2025-10-24T16:28:32.673946Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgss8z1dbzr36scyhtxkq9", Request has 18444982750396.877679s seconds to be completed 2025-10-24T16:28:32.674624Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgss8z1dbzr36scyhtxkq9", Created new session, sessionId: ydb://session/3?node_id=1&id=NmM5MGI1MmQtNTA3NDRmMmQtM2FhMjM4YWEtOGNkYWY5ZmM=, workerId: [1:7564826026487722684:2320], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:32.674665Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgss8z1dbzr36scyhtxkq9 2025-10-24T16:28:32.674683Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:32.674688Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-10-24T16:28:32.674695Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:32.676230806 373557 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.676271704 373557 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.676498Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026487722688:2298][/Root/.metadata/workload_manager/delayed_requests] Set up state: owne ... "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:54.200726Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826117217450556:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtegnezcyezrajpaf2v47, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGZiNTYxYjYtNGRiZDc3NTktNTNjYjE0ZDgtNjI5NzM0MjE=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:54.201912Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtegnezcyezrajpaf2v47, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGZiNTYxYjYtNGRiZDc3NTktNTNjYjE0ZDgtNjI5NzM0MjE=, PoolId: default}. Compute actor has finished execution: [9:7564826117217450560:2614] 2025-10-24T16:28:54.201965Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtegnezcyezrajpaf2v47, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGZiNTYxYjYtNGRiZDc3NTktNTNjYjE0ZDgtNjI5NzM0MjE=, PoolId: default}. 
Compute actor has finished execution: [9:7564826117217450562:2615] 2025-10-24T16:28:54.202142Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtegnezcyezrajpaf2v47", Forwarded response to sender actor, requestId: 34, sender: [9:7564826117217450537:2609], selfId: [9:7564826108627514213:2183], source: [9:7564826117217450538:2610] 2025-10-24T16:28:54.202334Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=NGZiNTYxYjYtNGRiZDc3NTktNTNjYjE0ZDgtNjI5NzM0MjE=, workerId: [9:7564826117217450538:2610], local sessions count: 0 >> TRestoreTests::ExportImportWithChecksums[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: 2025-10-24T16:28:38.367705Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.367755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:38.401326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.401622Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:38.404406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:685:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.404447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:38.404456Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:38.404796Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:681:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:38.405032Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:38.405063Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpp2wNPV/pdisk_1.dat 2025-10-24T16:28:38.534007Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:38.585188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.585232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.585346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:38.585357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:38.639193Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:28:38.639390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:38.639487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30520, node 1 TClient is connected to server localhost:14049 2025-10-24T16:28:38.745934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:38.745956Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:38.745960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:38.746037Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:40.178694Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.178747Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:40.181816Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:40.182105Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:40.182557Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:40.182617Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:40.182664Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:40.182912Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:40.183127Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:40.183137Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpm3hG1d/pdisk_1.dat 2025-10-24T16:28:40.263791Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:40.305621Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:40.305669Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:40.305830Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:40.305850Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:40.340219Z node 3 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-10-24T16:28:40.340494Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:40.340629Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62897, node 3 TClient is connected to server localhost:17680 2025-10-24T16:28:40.417151Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:40.417173Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:40.417177Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:40.417282Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:41.688124Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:41.688176Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:41.690552Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:41.690874Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:41.691162Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:41.691222Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:41.691251Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:41.691478Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:41.691526Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:41.691551Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpoUb0YQ/pdisk_1.dat 2025-10-24T16:28:41.805729Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:41.857892Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:41.857938Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:41.858075Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:41.858092Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:41.893242Z node 5 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-10-24T16:28:41.893427Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:41.893515Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5816, node 5 TClient is connected to server localhost:14857 2025-10-24T16:28:41.982796Z node 5 :NET_CLASSIFIER WARN: net_c ... 
10-24T16:28:43.832999Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:43.833159Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:43.833173Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:43.876400Z node 7 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-10-24T16:28:43.876553Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:43.876675Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21873, node 7 TClient is connected to server localhost:29647 2025-10-24T16:28:44.013268Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:44.013287Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:44.013291Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:44.013429Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:44.984691Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:44.986671Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:44.986730Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:44.986753Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpmQwrUB/pdisk_1.dat 2025-10-24T16:28:45.118113Z node 9 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:45.152236Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:45.152279Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:45.176615Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6752, node 9 TClient is connected to server localhost:2283 2025-10-24T16:28:45.261323Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:45.261353Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:45.261359Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:45.261789Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:45.295940Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:45.992090Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-4e47-1231c6b1-BSController" status: RED message: "System tablet is unresponsive" location { compute { tablet { type: "BSController" id: "72057594037932033" } } database { name: "/Root" } } type: "SYSTEM_TABLET" level: 3 } issue_log { id: "RED-30ee-1231c6b1" status: RED message: "Compute has issues with system tablets" location { database { name: "/Root" } } reason: "RED-4e47-1231c6b1-BSController" type: "COMPUTE" level: 2 } issue_log { id: "RED-2f2e-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-40f1-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "RED-846e-1231c6b1" status: RED message: "System tablet BSC didn\'t provide information" location { database { name: "/Root" } } type: "STORAGE" level: 2 } issue_log { id: 
"RED-40f1-1231c6b1-80c02825" status: RED message: "Pool failed" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "RED-02e1-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-02e1-1231c6b1-0" status: RED message: "Group failed" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-cc0d-1231c6b1-9-0-1-0-0-0" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-cc0d-1231c6b1-9-0-1-0-0-0" status: RED message: "VDisk is degraded" location { storage { node { id: 9 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-1-0-0-0" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "RED-da05-9-9-1" status: RED message: "PDisk is not available" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-1" path: "/home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpmQwrUB/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-edf5-1231c6b1" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-2f2e-1231c6b1" reason: "RED-30ee-1231c6b1" reason: "RED-846e-1231c6b1" reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } database_status { name: "/Root" overall: RED storage { overall: RED pools { id: "static" overall: RED groups { id: "0" overall: RED vdisks { id: "0-1-0-0-0" overall: RED pdisk { id: "9-1" overall: RED } } } } } compute { overall: RED nodes { id: "9" overall: YELLOW load { overall: YELLOW load: 141.82959 cores: 64 } } clock_skew { overall: GREEN } } } location { id: 9 host: "::1" port: 12001 } 2025-10-24T16:28:52.793320Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:52.795818Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:52.796295Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:451:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:52.796376Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:52.796392Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpPJbACe/pdisk_1.dat 2025-10-24T16:28:52.890591Z node 11 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:52.924841Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:52.924883Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:52.946917Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19586, node 11 TClient is connected to server localhost:6750 2025-10-24T16:28:53.027989Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:53.028008Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:53.028013Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:53.028160Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:53.974832Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:53.976782Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:53.977320Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:445:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:53.977403Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:53.977436Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001860/r3tmp/tmpeGe05A/pdisk_1.dat 2025-10-24T16:28:54.074229Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:54.106948Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:54.106990Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:54.143232Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16530, node 13 TClient is connected to server localhost:5529 2025-10-24T16:28:54.244519Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:54.244543Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:54.244547Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:54.244665Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration >> TImportTests::CompletedImportEndTime ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-10-24T16:28:44.130199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:44.170586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:44.170692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:44.170704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0027ac/r3tmp/tmpihB7eC/pdisk_1.dat 2025-10-24T16:28:44.257603Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:44.258580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.258609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.258714Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323323705254 != 1761323323705257 2025-10-24T16:28:44.289791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:44.338427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:44.382847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:44.458180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:44.473583Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:44.473827Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:44.473893Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:28:44.473948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:28:44.490829Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:44.491001Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:28:44.491023Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:28:44.491195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:28:44.491206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:28:44.491215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:28:44.491260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:28:44.491278Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:28:44.491289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:28:44.501550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:28:44.505981Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:28:44.506049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:28:44.506079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:28:44.506085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:28:44.506090Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:28:44.506096Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:44.506162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:44.506170Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:44.506312Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:28:44.506335Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:28:44.506357Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:28:44.506365Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:44.506373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:28:44.506379Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:44.506384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:44.506389Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:28:44.506395Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:28:44.506409Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:44.506416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:44.506423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:28:44.506517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:28:44.506522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:28:44.506544Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:28:44.506603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:28:44.506614Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:28:44.506633Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:28:44.506644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:28:44.506648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:28:44.506654Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:28:44.506659Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:44.506710Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:28:44.506714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:28:44.506719Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:28:44.506724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:28:44.506736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:28:44.506741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:28:44.506745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:28:44.506750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:28:44.506756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:28:44.507056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:28:44.507067Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:28:44.517364Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:28:44.517394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:44.517402Z n ... Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-10-24T16:28:54.463656Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-10-24T16:28:54.463663Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.463667Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-10-24T16:28:54.463672Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-10-24T16:28:54.463678Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-10-24T16:28:54.463742Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-10-24T16:28:54.463758Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:252: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-10-24T16:28:54.463763Z node 9 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-10-24T16:28:54.463776Z node 9 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:683: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-10-24T16:28:54.463794Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.463797Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-10-24T16:28:54.463800Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:54.463803Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:28:54.463815Z node 9 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-10-24T16:28:54.463819Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-10-24T16:28:54.463821Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [3500:1234567890011] at 72075186224037888 2025-10-24T16:28:54.463826Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.463828Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:54.463831Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-10-24T16:28:54.463833Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-10-24T16:28:54.463837Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.463840Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-10-24T16:28:54.463842Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadInRS 2025-10-24T16:28:54.463866Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadInRS 2025-10-24T16:28:54.463872Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.463876Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadInRS 2025-10-24T16:28:54.463881Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit BlockFailPoint 2025-10-24T16:28:54.463885Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BlockFailPoint 2025-10-24T16:28:54.463893Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.463897Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BlockFailPoint 2025-10-24T16:28:54.463901Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-10-24T16:28:54.463905Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-10-24T16:28:54.463911Z node 9 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-10-24T16:28:54.464027Z node 9 :TX_DATASHARD TRACE: execute_write_unit.cpp:122: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-10-24T16:28:54.464048Z node 9 :TX_DATASHARD DEBUG: 
datashard_write_operation.cpp:503: tx 1234567890011 at 72075186224037888 released its data 2025-10-24T16:28:54.464056Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-10-24T16:28:54.464061Z node 9 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-10-24T16:28:54.464065Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-10-24T16:28:54.464069Z node 9 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:54.464073Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:54.464188Z node 9 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:28:54.464195Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-10-24T16:28:54.464200Z node 9 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-10-24T16:28:54.464244Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-10-24T16:28:54.464257Z node 9 :TX_DATASHARD TRACE: datashard_write_operation.cpp:252: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-10-24T16:28:54.464262Z node 9 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-10-24T16:28:54.464271Z node 9 :TX_DATASHARD DEBUG: datashard_write_operation.cpp:596: tx 1234567890011 at 72075186224037888 restored its data 2025-10-24T16:28:54.464294Z node 9 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-10-24T16:28:54.464305Z node 9 :TX_DATASHARD TRACE: locks.cpp:194: Lock 1234567890001 marked broken at v{min} 2025-10-24T16:28:54.464318Z node 9 :TX_DATASHARD TRACE: execute_write_unit.cpp:47: add locks to result: 0 2025-10-24T16:28:54.464327Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:28:54.464332Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-10-24T16:28:54.464337Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-10-24T16:28:54.464342Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-10-24T16:28:54.464385Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-10-24T16:28:54.464390Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for 
[3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-10-24T16:28:54.464394Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:28:54.464398Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:28:54.464405Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-10-24T16:28:54.464409Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:28:54.464414Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-10-24T16:28:54.464418Z node 9 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:54.464422Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-10-24T16:28:54.464426Z node 9 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:54.464430Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:54.464487Z node 9 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-10-24T16:28:54.464544Z node 9 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-10-24T16:28:54.464547Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-10-24T16:28:54.464557Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:835: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [9:800:2647] 2025-10-24T16:28:54.464563Z node 9 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TRestoreTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnSingleShardTable[Zstd] >> TImportTests::CancelUponGettingSchemeShouldSucceed >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile >> TRestoreTests::ExportImportWithChecksums[Raw] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 11824, MsgBus: 16085 2025-10-24T16:28:26.280259Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826000274353126:2157];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:26.280297Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0017e3/r3tmp/tmpkwu6K9/pdisk_1.dat 2025-10-24T16:28:26.351903Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:26.370772Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:26.371127Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826000274352977:2081] 1761323306276828 != 1761323306276831 TServer::EnableGrpc on GrpcPort 11824, node 1 2025-10-24T16:28:26.381151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:26.381167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:26.381174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:26.381224Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:26.383286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:26.383311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:26.385115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16085 TClient is connected to server localhost:16085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:26.449069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-10-24T16:28:26.453239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:26.461172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:26.501657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:26.538025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:26.555519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:26.580388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:26.852150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564826000274354625:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.852191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.852360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564826000274354634:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.852373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:26.912683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.924260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.938737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.951380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:26.975246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:27.005646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:27.035960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:27.066444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:27.108101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564826004569322790:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:27.108141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:27.108334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564826004569322795:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:27.108343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564826004569322796:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:27.108347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... t}. Database not set, use /Root 2025-10-24T16:28:54.231739Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714371. Ctx: { TraceId: 01k8bgtejn340xm6c4mwkfkztk, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.232783Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714372. Ctx: { TraceId: 01k8bgtejgfnx45mqfnhgtacgb, Database: , SessionId: ydb://session/3?node_id=3&id=N2QxZjQ3My0yMzQ1MTQ4ZS00MWFkZGVjZC0yMDFiMGI1MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.232841Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714373. Ctx: { TraceId: 01k8bgtejn340xm6c4mwkfkztk, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.233150Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714374. Ctx: { TraceId: 01k8bgtejrb68pchaq3wx9sm4v, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.236307Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714376. Ctx: { TraceId: 01k8bgtejv74cxqzwp2cpyhb6r, Database: , SessionId: ydb://session/3?node_id=3&id=YzY4ZGI0MDEtM2M4MWE1OTMtMzdkZjUwYTEtNWVjNWM1ZjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.236347Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714375. Ctx: { TraceId: 01k8bgtejv6v1b3mq77wnd9x37, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.236629Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714377. Ctx: { TraceId: 01k8bgtejv1kax99c0bwczhy89, Database: , SessionId: ydb://session/3?node_id=3&id=NGQ0NzdjYzktY2MwZDVlYWEtMTFkNzJlY2QtZGI0Yjk3ZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.237810Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714378. Ctx: { TraceId: 01k8bgtejv6v1b3mq77wnd9x37, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.237861Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714379. Ctx: { TraceId: 01k8bgtejv74cxqzwp2cpyhb6r, Database: , SessionId: ydb://session/3?node_id=3&id=YzY4ZGI0MDEtM2M4MWE1OTMtMzdkZjUwYTEtNWVjNWM1ZjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.237926Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714380. Ctx: { TraceId: 01k8bgtejv1kax99c0bwczhy89, Database: , SessionId: ydb://session/3?node_id=3&id=NGQ0NzdjYzktY2MwZDVlYWEtMTFkNzJlY2QtZGI0Yjk3ZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.238673Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714381. 
Ctx: { TraceId: 01k8bgtejv6v1b3mq77wnd9x37, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.240897Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714382. Ctx: { TraceId: 01k8bgtejz7mz4hv3mr5df8szx, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.241482Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714383. Ctx: { TraceId: 01k8bgtejze2skdhh4qrywkw5j, Database: , SessionId: ydb://session/3?node_id=3&id=N2QxZjQ3My0yMzQ1MTQ4ZS00MWFkZGVjZC0yMDFiMGI1MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.243512Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714384. Ctx: { TraceId: 01k8bgtek21gm51g4apazxat6q, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.244188Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714385. Ctx: { TraceId: 01k8bgtejze2skdhh4qrywkw5j, Database: , SessionId: ydb://session/3?node_id=3&id=N2QxZjQ3My0yMzQ1MTQ4ZS00MWFkZGVjZC0yMDFiMGI1MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.249925Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714386. Ctx: { TraceId: 01k8bgtek9dfw2yaakserdwk51, Database: , SessionId: ydb://session/3?node_id=3&id=YzY4ZGI0MDEtM2M4MWE1OTMtMzdkZjUwYTEtNWVjNWM1ZjA=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.251341Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714387. Ctx: { TraceId: 01k8bgtek92dk9k4325ewavd2x, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.252643Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714388. Ctx: { TraceId: 01k8bgtekab64rmhthz6qp0ggp, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.253994Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714390. Ctx: { TraceId: 01k8bgtekac6ry1ptvsjvw0yms, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.254392Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714389. Ctx: { TraceId: 01k8bgtek8ajb4f5gwb8hjqgcd, Database: , SessionId: ydb://session/3?node_id=3&id=NGQ0NzdjYzktY2MwZDVlYWEtMTFkNzJlY2QtZGI0Yjk3ZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.255424Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714391. Ctx: { TraceId: 01k8bgtekac6ry1ptvsjvw0yms, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.255752Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714392. 
Ctx: { TraceId: 01k8bgtek8ajb4f5gwb8hjqgcd, Database: , SessionId: ydb://session/3?node_id=3&id=NGQ0NzdjYzktY2MwZDVlYWEtMTFkNzJlY2QtZGI0Yjk3ZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.257090Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714393. Ctx: { TraceId: 01k8bgtekf646qpe91grfasynv, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.257601Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714394. Ctx: { TraceId: 01k8bgtekfcmd7kj55q9xjgkq9, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.257629Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714395. Ctx: { TraceId: 01k8bgtekf58x7hwgwc776xvyc, Database: , SessionId: ydb://session/3?node_id=3&id=N2QxZjQ3My0yMzQ1MTQ4ZS00MWFkZGVjZC0yMDFiMGI1MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.259240Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714396. Ctx: { TraceId: 01k8bgtekf58x7hwgwc776xvyc, Database: , SessionId: ydb://session/3?node_id=3&id=N2QxZjQ3My0yMzQ1MTQ4ZS00MWFkZGVjZC0yMDFiMGI1MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.259671Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714397. Ctx: { TraceId: 01k8bgtekfcmd7kj55q9xjgkq9, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.260557Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714398. Ctx: { TraceId: 01k8bgtekf58x7hwgwc776xvyc, Database: , SessionId: ydb://session/3?node_id=3&id=N2QxZjQ3My0yMzQ1MTQ4ZS00MWFkZGVjZC0yMDFiMGI1MQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root finished with status: SUCCESS 2025-10-24T16:28:54.262078Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714399. Ctx: { TraceId: 01k8bgtekmfaydr0neh4wv4k9m, Database: , SessionId: ydb://session/3?node_id=3&id=MWEzMGIxNzUtYWUxMDEwYWMtYzFhNzY5NGQtYjk2ZWIxNDI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.262148Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714400. Ctx: { TraceId: 01k8bgtekm5wn95bgrvmky1qpy, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.262754Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714401. Ctx: { TraceId: 01k8bgtekm7yhcsepmsk6sq7hq, Database: , SessionId: ydb://session/3?node_id=3&id=NGQ0NzdjYzktY2MwZDVlYWEtMTFkNzJlY2QtZGI0Yjk3ZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.263545Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714402. Ctx: { TraceId: 01k8bgtekm5wn95bgrvmky1qpy, Database: , SessionId: ydb://session/3?node_id=3&id=Y2I4NTFhNzctNDNkOWRkYmQtNDE2Njk5ZDctODJmYTM4YWQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.263555Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714404. 
Ctx: { TraceId: 01k8bgtekm7yhcsepmsk6sq7hq, Database: , SessionId: ydb://session/3?node_id=3&id=NGQ0NzdjYzktY2MwZDVlYWEtMTFkNzJlY2QtZGI0Yjk3ZWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.264336Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714403. Ctx: { TraceId: 01k8bgtekmfaydr0neh4wv4k9m, Database: , SessionId: ydb://session/3?node_id=3&id=MWEzMGIxNzUtYWUxMDEwYWMtYzFhNzY5NGQtYjk2ZWIxNDI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root finished with status: SUCCESS 2025-10-24T16:28:54.266754Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714405. Ctx: { TraceId: 01k8bgteks1j0k0sfpv92vkggc, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root finished with status: SUCCESS 2025-10-24T16:28:54.267076Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714406. Ctx: { TraceId: 01k8bgteksega5ckhpbsxq85m3, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.267797Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714407. Ctx: { TraceId: 01k8bgteks1j0k0sfpv92vkggc, Database: , SessionId: ydb://session/3?node_id=3&id=MzQ1MDFmODgtZDg5YjUwYmItMzU1NGU2OGItZGFhZDFkMzk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:28:54.267890Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976714408. Ctx: { TraceId: 01k8bgteksega5ckhpbsxq85m3, Database: , SessionId: ydb://session/3?node_id=3&id=YzZhOGUwZWQtMjAwODQ4YTYtOTgwYjA3MGQtNzA2MTk2Y2Y=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpLimits::QSReplySize+useSink >> TImportTests::CancelUponGettingSchemeShouldSucceed [GOOD] >> TImportTests::CancelUponTransferringShouldSucceed >> TRestoreTests::ExportImportWithChecksums[Zstd] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumAbsence[Raw] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] >> TRestoreTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesTinyBatch >> TImportTests::ImportStartTime >> TImportTests::CancelUponTransferringShouldSucceed [GOOD] >> TImportTests::CancelledImportEndTime ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithChecksums[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:53.417392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:53.417418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:53.417424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:53.417434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:53.417441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:53.417445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:53.417455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:53.417480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:53.417616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:53.417683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:53.433591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:53.433641Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:53.437277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:53.437310Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:53.437338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:53.438431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:53.438572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:53.438658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.438716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:53.439426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:53.439481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:53.439693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:53.439700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:53.439728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:53.439734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:53.439740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:53.439763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.440885Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:53.462833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:53.462940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.463017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:53.463030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 
2025-10-24T16:28:53.463125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:53.463144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:53.472537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.472631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:53.472727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.472743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:53.472749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:53.472756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:53.473539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.473556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:53.473562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:53.474037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.474051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:53.474057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:53.474064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:53.474770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:53.475298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 
cookie: 0:1 msg type: 269090816 2025-10-24T16:28:53.475368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:53.475642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:53.475673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:53.475681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:53.475758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:53.475765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:53.475811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:53.475824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:53.476359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:53.476370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
rd__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 REQUEST: HEAD /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:5292 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 75850896-9F83-4D80-B4C6-E311C0E87409 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 76 2025-10-24T16:28:55.173248Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:737: [Import] [s3:281474976710763] HandleChecksum NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 0fc92210f4f4e12aa23b94360be8511d ContentLength: 76 } } 2025-10-24T16:28:55.173272Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710763] GetObject: key# /data_00.csv.sha256, range# 0-75 REQUEST: GET /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:5292 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B1F5069D-FEAC-4F75-96A2-DB55F070924D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-75 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 76 2025-10-24T16:28:55.173992Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:751: [Import] [s3:281474976710763] HandleChecksum NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 0fc92210f4f4e12aa23b94360be8511d Body: 76b } FAKE_COORDINATOR: Erasing txId 281474976710763 2025-10-24T16:28:55.174603Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:55.185604Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 0 MdLen: 32 } DownloadState: } } 2025-10-24T16:28:55.185645Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 0 MdLen: 32 } DownloadState: } 2025-10-24T16:28:55.185667Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:5292 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA 
amz-sdk-invocation-id: FFEF2D06-0690-4A36-9343-1460331738CA amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-10-24T16:28:55.186655Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-10-24T16:28:55.186672Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-10-24T16:28:55.186719Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-10-24T16:28:55.187369Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 88 Data: 1981951025 Data: 1702194273 Data: 664129 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 11 MdLen: 32 } DownloadState: } } 2025-10-24T16:28:55.187395Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 88 Data: 1981951025 Data: 1702194273 Data: 664129 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 11 MdLen: 32 } DownloadState: } 2025-10-24T16:28:55.187407Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-10-24T16:28:55.200295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 758 RawX2: 12884904587 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:55.200323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-10-24T16:28:55.200348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 758 RawX2: 12884904587 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:55.200359Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 758 RawX2: 12884904587 } Origin: 
72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:55.200373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:55.200376Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.200380Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-10-24T16:28:55.200385Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-10-24T16:28:55.200434Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:55.201024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.201087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.201097Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-10-24T16:28:55.201112Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:55.201116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:55.201121Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:55.201125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:55.201130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:28:55.201147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:28:55.201155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:55.201161Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:28:55.201166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: 
RemoveTx for txid 281474976710763:0 2025-10-24T16:28:55.201197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:28:55.201681Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:28:55.201696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:28:55.201705Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:55.201709Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:28:55.202115Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:55.202141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:55.202148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:704:2654] TestWaitNotification: OK eventTxId 103 >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Raw] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] >> TRestoreTests::ExportImportOnSupportedDatatypes >> TRestoreTests::ExportImportWithMetadataCorruption >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit >> TReplicationWithRebootsTests::AlterReplicationConfig [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesTinyBatch [GOOD] >> TImportTests::ImportStartTime [GOOD] >> TImportTests::IgnoreBasicSchemeLimits >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] >> TImportTests::ShouldRestoreColumnFamilies ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 11197, MsgBus: 21335 2025-10-24T16:28:32.292553Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826026084453713:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:32.292566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f5a/r3tmp/tmp1OSpDR/pdisk_1.dat 2025-10-24T16:28:32.335528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:32.346341Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:32.351959Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826026084453689:2081] 1761323312292430 != 1761323312292433 TServer::EnableGrpc on GrpcPort 11197, node 1 2025-10-24T16:28:32.372261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:32.372277Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:32.372279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:32.372328Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21335 2025-10-24T16:28:32.398104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:32.398140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:32.399732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:32.470058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:32.471355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:32.471704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:32.472100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-10-24T16:28:32.472750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323312518, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:32.472985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715657:0 2025-10-24T16:28:32.472987Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826026084454210:2247] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 3 2025-10-24T16:28:32.472999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-10-24T16:28:32.473047Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826026084453660:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.473050Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826026084453657:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.473081Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826026084453663:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:32.473121Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826026084454116:2199][/Root] Path was updated to new version: owner# [1:7564826026084453958:2109], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.473123Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826026084454210:2247] Ack update: ack to# [1:7564826026084454037:2149], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:32.473133Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826026084454233:2288][/Root] Path was updated to new version: owner# [1:7564826026084454227:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.473151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-10-24T16:28:32.473182Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826026084454232:2287][/Root] Path was updated to new version: owner# [1:7564826026084454226:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.488030Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026084454302:2294][/Root/.metadata/result_sets] Set up state: owner# [1:7564826026084453958:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.488048Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026084454301:2293][/Root/.metadata/script_execution_leases] Set up state: owner# [1:7564826026084453958:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.488064Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026084454300:2292][/Root/.metadata/script_executions] Set up state: owner# [1:7564826026084453958:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.488160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:32.488171Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:146: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table Root/.metadata/script_executions not found 2025-10-24T16:28:32.761091Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:32.765206097 373645 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:32.765277651 373645 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:32.763956Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgssaxb2zppyqbe8b6rt3b", Request has 18444982750396.787669s seconds to be completed 2025-10-24T16:28:32.764739Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgssaxb2zppyqbe8b6rt3b", Created new session, sessionId: ydb://session/3?node_id=1&id=Y2RjNTJlZjktMTA5OTA4NWItMzk1MDYzMDYtMzhkMGY3NDc=, workerId: [1:7564826026084454340:2319], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:32.764788Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgssaxb2zppyqbe8b6rt3b 2025-10-24T16:28:32.765092Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026084454342:2296][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7564826026084453958:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.765152Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826026084454343:2297][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7564826026084453958:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:32.765698Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f5a/r3tmp/spilling-tmp-runner/node_1_3a7f0f0c-4f5f9fd0-eef60134-7deef304, actor: 
[1:7564826026084454322:2302] 2025-10-24T16:28:32.765813Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f5a/r3tmp/spilling-tmp-runner 2025-10-24T16:28:32.768600Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. ... } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:55.366411Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826125292064726:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtfnd877pmrq6s0d9pvx3, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGZkNmE0NWEtZjY0NjBmZGItNWYyZDJjMDAtODliMTE5YWU=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:55.367904Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtfnd877pmrq6s0d9pvx3, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGZkNmE0NWEtZjY0NjBmZGItNWYyZDJjMDAtODliMTE5YWU=, PoolId: default}. Compute actor has finished execution: [9:7564826125292064730:2614] 2025-10-24T16:28:55.367937Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtfnd877pmrq6s0d9pvx3, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGZkNmE0NWEtZjY0NjBmZGItNWYyZDJjMDAtODliMTE5YWU=, PoolId: default}. 
Compute actor has finished execution: [9:7564826125292064731:2615] 2025-10-24T16:28:55.368106Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtfnd877pmrq6s0d9pvx3", Forwarded response to sender actor, requestId: 34, sender: [9:7564826125292064707:2609], selfId: [9:7564826116702128460:2243], source: [9:7564826125292064708:2610] 2025-10-24T16:28:55.368307Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=ZGZkNmE0NWEtZjY0NjBmZGItNWYyZDJjMDAtODliMTE5YWU=, workerId: [9:7564826125292064708:2610], local sessions count: 0 2025-10-24T16:28:55.550959Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444982750374.000663s seconds to be completed 2025-10-24T16:28:55.551481Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=9&id=YjUwMjAyZi0xMGQzMzRkOC01NWMxNTc2Yy0yMzk4YjQ2ZA==, workerId: [9:7564826125292064739:2618], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:55.551514Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-10-24T16:28:55.551589Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=YjUwMjAyZi0xMGQzMzRkOC01NWMxNTc2Yy0yMzk4YjQ2ZA==, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 36, targetId: [9:7564826125292064739:2618] 2025-10-24T16:28:55.551598Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 36 timeout: 300.000000s actor id: [9:7564826125292064741:2910] >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] >> TImportTests::ChangefeedsWithPartitioning >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> TRestoreTests::ExportImportWithDataChecksumAbsence[Raw] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumAbsence[Zstd] >> TRestoreTests::ShouldFailOnOutboundKey[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnMultipleFramesTinyBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:54.944767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:54.944793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:54.944798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:54.944805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: 
OperationsProcessing config: using default configuration 2025-10-24T16:28:54.944811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:54.944815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:54.944823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:54.944849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:54.944964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:54.945032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:54.960372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:54.960400Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:54.963702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:54.963736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:54.963765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:54.965463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:54.965654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:54.965739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:54.965802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:54.967112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:54.967160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:54.967389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:54.967400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:54.967446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:54.967515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-10-24T16:28:54.967531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:54.967566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.971001Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:54.990962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:54.991037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.991098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:54.991106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:54.991171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:54.991185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:54.991898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:54.991939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:54.992004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.992014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:54.992019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:54.992024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:54.992477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.992490Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:54.992499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:54.992887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.992898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:54.992903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:54.992909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:54.993592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:54.993991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:54.994027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:54.994215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:54.994240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:54.994248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:54.994308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:54.994316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:54.994359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:54.994371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing 
txId 1 2025-10-24T16:28:54.994852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:54.994862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27123 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DE99ABAC-991E-4628-B39C-C9C5E57CD33F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=54-54 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:28:56.013142Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-10-24T16:28:56.013155Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-10-24T16:28:56.013166Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 55-55 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27123 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 792ED8A8-BF05-475F-ACD2-D87356A803FC amz-sdk-request: attempt=1 content-type: application/xml range: bytes=55-55 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:28:56.013650Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-10-24T16:28:56.013662Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-10-24T16:28:56.013672Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 56-56 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27123 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 88136440-8BF4-4CDA-AB9E-60C51FCEEA4C amz-sdk-request: attempt=1 content-type: application/xml range: bytes=56-56 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:28:56.014065Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-10-24T16:28:56.014076Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-10-24T16:28:56.014085Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 57-57 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27123 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 50E59F35-00FA-43C3-999B-1505F39C6AB1 amz-sdk-request: attempt=1 content-type: 
application/xml range: bytes=57-57 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:28:56.014556Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-10-24T16:28:56.014567Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-10-24T16:28:56.014577Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 58-58 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27123 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 90684EB5-AEF1-4F75-AFC8-CBC57450BB64 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=58-58 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:28:56.014944Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-10-24T16:28:56.014954Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-10-24T16:28:56.014963Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 59-59 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:27123 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C714C0FF-BCF7-48C4-AB2A-F16E66DD731A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=59-59 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:28:56.015317Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 1b } 2025-10-24T16:28:56.015331Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 1 2025-10-24T16:28:56.015367Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-10-24T16:28:56.016063Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:56.016079Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:56.016088Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 24, writtenRows# 3 2025-10-24T16:28:56.028186Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-10-24T16:28:56.028207Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:56.028227Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-10-24T16:28:56.028239Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-10-24T16:28:56.028250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.028253Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.028257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:56.028262Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:28:56.028298Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.028743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.028779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.028785Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:56.028800Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:56.028805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:56.028812Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part 
operation is done id#102:0 progress is 1/1 2025-10-24T16:28:56.028836Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:56.028842Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:28:56.028854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:28:56.028861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:56.028865Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:56.028869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:56.028889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:56.029320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:56.029331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: 2025-10-24T16:28:44.149308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:44.191841Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:44.191944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:44.191955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002786/r3tmp/tmpNTbqFO/pdisk_1.dat 2025-10-24T16:28:44.279084Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:44.280132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:44.280167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:44.280279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323323727685 != 1761323323727688 2025-10-24T16:28:44.311238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:44.364442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:44.413391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:44.488561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:44.503647Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:44.503912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:44.503987Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:28:44.504041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:28:44.512394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:44.512575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:28:44.512600Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:28:44.512763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:28:44.512771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:28:44.512778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:28:44.512836Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:28:44.512858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:28:44.512868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:28:44.523171Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:28:44.527841Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:28:44.529186Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:28:44.529215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:28:44.529221Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:28:44.529226Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:28:44.529232Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:44.529297Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:44.529304Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:44.529420Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:28:44.529446Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:28:44.529463Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:28:44.529475Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:44.529482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:28:44.529487Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:44.529491Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:44.529497Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:28:44.529502Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:28:44.529514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:44.529520Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:44.529526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:28:44.529616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:28:44.529622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:28:44.529642Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:28:44.529694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:28:44.529706Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:28:44.529726Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:28:44.529734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:28:44.529739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:28:44.529746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:28:44.529751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:44.529805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:28:44.529810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:28:44.529814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:28:44.529819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:28:44.529830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:28:44.529836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:28:44.529843Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:28:44.529849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:28:44.529854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:28:44.530146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:28:44.530156Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:28:44.540463Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:28:44.540495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:44.540503Z n ... ode 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [9:710:2585], Recipient [9:956:2761]: {TEvReadSet step# 2001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-10-24T16:28:55.949805Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:55.949810Z node 9 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... validating table 2025-10-24T16:28:55.977341Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [9:1008:2800], Recipient [9:956:2761]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:55.977372Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:55.977382Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [9:1007:2799], serverId# [9:1008:2800], sessionId# [0:0:0] 2025-10-24T16:28:55.988042Z node 9 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgtg8eaer6fcztg8f9z84a, Database: , SessionId: ydb://session/3?node_id=9&id=MWI2NmM2Y2YtMjY4ODc0ZGEtMWVkMWFjNjAtZGI2NTJjNTg=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:28:55.988966Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [9:1014:2803], Recipient [9:956:2761]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-10-24T16:28:55.989032Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:28:55.989054Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-10-24T16:28:55.989078Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:28:55.989085Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:28:55.989092Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:55.989097Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:28:55.989114Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-10-24T16:28:55.989121Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:28:55.989124Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:55.989130Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:28:55.989134Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:28:55.989153Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-10-24T16:28:55.989213Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2001/18446744073709551615 2025-10-24T16:28:55.989222Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[9:1014:2803], 0} after executionsCount# 1 2025-10-24T16:28:55.989231Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[9:1014:2803], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:55.989248Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[9:1014:2803], 0} finished in read 2025-10-24T16:28:55.989259Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:28:55.989263Z node 9 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:28:55.989267Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:28:55.989272Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:28:55.989285Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037888 is Executed 2025-10-24T16:28:55.989291Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:28:55.989296Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:3] at 72075186224037888 has finished 2025-10-24T16:28:55.989302Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:28:55.989327Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:28:55.989590Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [9:1014:2803], Recipient [9:710:2585]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-10-24T16:28:55.989619Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [9:1014:2803], Recipient [9:956:2761]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:28:55.989628Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:28:55.989675Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-10-24T16:28:55.989686Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-10-24T16:28:55.989696Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:55.989700Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-10-24T16:28:55.989704Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:55.989709Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-10-24T16:28:55.989717Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-10-24T16:28:55.989723Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:55.989727Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:55.989730Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to 
execution unit ExecuteRead 2025-10-24T16:28:55.989734Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-10-24T16:28:55.989748Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-10-24T16:28:55.989777Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2001/18446744073709551615 2025-10-24T16:28:55.989784Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[9:1014:2803], 1} after executionsCount# 1 2025-10-24T16:28:55.989790Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[9:1014:2803], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:55.989802Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[9:1014:2803], 1} finished in read 2025-10-24T16:28:55.989810Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:55.989813Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-10-24T16:28:55.989817Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-10-24T16:28:55.989822Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-10-24T16:28:55.989828Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:55.989832Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-10-24T16:28:55.989836Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037889 has finished 2025-10-24T16:28:55.989840Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-10-24T16:28:55.989853Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-10-24T16:28:55.989977Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [9:1014:2803], Recipient [9:710:2585]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-10-24T16:28:55.989985Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile >> 
TImportTests::ShouldRestoreColumnFamilies [GOOD] >> TImportTests::ShouldRestoreAttributes >> TRestoreTests::ShouldRestoreSpecialFpValues ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 9062, MsgBus: 15521 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f27/r3tmp/tmpk5GTHD/pdisk_1.dat 2025-10-24T16:28:34.708611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:34.708654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:34.713973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:34.715441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:34.715542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:34.716433Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:34.719194Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826032628547421:2081] 1761323314434749 != 1761323314434752 TServer::EnableGrpc on GrpcPort 9062, node 1 2025-10-24T16:28:34.752120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:34.752136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:34.752138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:34.752188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15521 2025-10-24T16:28:34.916418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:35.047837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:35.049136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:35.049593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:35.049833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:35.050385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323315094, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:35.050619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710657:0 2025-10-24T16:28:35.050636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-10-24T16:28:35.050686Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826032628547951:2255] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 3 2025-10-24T16:28:35.050743Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826032628547395:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.050793Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826032628547389:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.050805Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826032628547392:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.050811Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826032628547918:2232][/Root] Path was updated to new version: owner# [1:7564826032628547613:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.050835Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826032628547951:2255] Ack update: ack to# [1:7564826032628547791:2159], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:35.050862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2025-10-24T16:28:35.052652Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826032628547970:2288][/Root] Path was updated to new version: owner# [1:7564826032628547964:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.052733Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826032628547969:2287][/Root] Path was updated to new version: owner# [1:7564826032628547963:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.053064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:35.394697Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:35.395164Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f27/r3tmp/spilling-tmp-runner/node_1_737ef448-6c6f20f1-1014bc14-d61d7026, actor: [1:7564826036923515350:2302] 2025-10-24T16:28:35.395286Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f27/r3tmp/spilling-tmp-runner E1024 16:28:35.395834124 374338 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.396158107 374338 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.397403Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsvvm2s0apr6dqpsg4yqm", Request has 18444982750394.154223s seconds to be completed 2025-10-24T16:28:35.398162Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsvvm2s0apr6dqpsg4yqm", Created new session, sessionId: ydb://session/3?node_id=1&id=M2U5NTBlNi1iMGNiZDYzMy1iY2E5MzhhNi1hNGU1Y2UwMA==, workerId: [1:7564826036923515369:2320], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:35.398202Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsvvm2s0apr6dqpsg4yqm 2025-10-24T16:28:35.398216Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:35.398221Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 
2025-10-24T16:28:35.398229Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:35.401713451 374337 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.401766691 374337 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.402123201 374337 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.402145042 374337 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.403385347 374338 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.403408676 374338 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.402690Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826036923515373:2300][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7564826032628547613:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.402727Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826036923515374:2301][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7564826032628547613:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.402746Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826036923515375:2302][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: ... house { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:55.882224Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826123415061944:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtg5e0rkpw49tj4spn32g, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MjhmMTYzZjgtMjZlNTQ3N2ItNjJiMGQ5ODMtYzRjYTIyYzc=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:55.883454Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtg5e0rkpw49tj4spn32g, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MjhmMTYzZjgtMjZlNTQ3N2ItNjJiMGQ5ODMtYzRjYTIyYzc=, PoolId: default}. Compute actor has finished execution: [9:7564826123415061948:2614] 2025-10-24T16:28:55.883518Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtg5e0rkpw49tj4spn32g, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MjhmMTYzZjgtMjZlNTQ3N2ItNjJiMGQ5ODMtYzRjYTIyYzc=, PoolId: default}. 
Compute actor has finished execution: [9:7564826123415061949:2615] 2025-10-24T16:28:55.883747Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtg5e0rkpw49tj4spn32g", Forwarded response to sender actor, requestId: 34, sender: [9:7564826123415061925:2609], selfId: [9:7564826119120093003:2265], source: [9:7564826123415061926:2610] 2025-10-24T16:28:55.883960Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=MjhmMTYzZjgtMjZlNTQ3N2ItNjJiMGQ5ODMtYzRjYTIyYzc=, workerId: [9:7564826123415061926:2610], local sessions count: 0 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 24502, MsgBus: 61341 2025-10-24T16:28:34.986522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826033234780739:2246];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:34.986537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:35.012477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f34/r3tmp/tmpG91Bdi/pdisk_1.dat 2025-10-24T16:28:35.186340Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:35.188815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:35.188839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:35.189080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826033234780530:2081] 1761323314982508 != 1761323314982511 2025-10-24T16:28:35.192291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24502, node 1 2025-10-24T16:28:35.220306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:35.220318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:35.220319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:35.220362Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:35.256849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61341 TClient is connected to server localhost:61341 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:35.410699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:35.412408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:35.412750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:35.413024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:35.413645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323315458, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:35.413902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710657:0 2025-10-24T16:28:35.413910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-10-24T16:28:35.413948Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826037529748355:2252] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 3 2025-10-24T16:28:35.414040Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826033234780498:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.414077Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826033234780501:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.414087Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826033234780504:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.414140Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7564826037529748396:2265][/Root] Path was updated to new version: owner# [1:7564826037529748119:2121], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.414223Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826037529748355:2252] Ack update: ack to# [1:7564826037529748183:2150], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:35.414264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2025-10-24T16:28:35.414589Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826037529748373:2288][/Root] Path was updated to new version: owner# [1:7564826037529748367:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.414632Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826037529748372:2287][/Root] Path was updated to new version: owner# [1:7564826037529748366:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.415148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:35.743921Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:35.747591322 374591 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.747682032 374591 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.747998361 374592 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.748019227 374592 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.748428177 374591 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.748451288 374591 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.748599Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f34/r3tmp/spilling-tmp-runner/node_1_b00247c-f543ba25-c2367462-5b1c55bf, actor: [1:7564826037529748464:2302] 2025-10-24T16:28:35.748782Z node 1 :KQP_COMPUTE INFO: 
spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f34/r3tmp/spilling-tmp-runner 2025-10-24T16:28:35.748973Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsw725qwk6x0awkk2rjhy", Request has 18444982750393.802649s seconds to be completed E1024 16:28:35.749607876 374592 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.749630975 374592 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.749965Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsw725qwk6x0awkk2rjhy", Created new session, sessionId: ydb://session/3?node_id=1&id=YTlhMmZjNmUtYWU0YTBmYTYtMTc1MzUtM2RlMDM1MTk=, workerId: [1:7564826037529748485:2322], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:35.750021Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsw725qwk6x0awkk2rjhy 2025-10-24T16:28:35.750041Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:35.750051Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-10-24T16:28:35.750060Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:35.750354Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2025-10-24T16:28:35.753492Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826037529748489:2298][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7564826037529748119:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.753495Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826037529748490:2299][/Root/.metadata/work ... { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { 
type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:56.065317Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826129860996904:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtgb8aynja438ehqyymxh, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OWU1OTNmODktNGY1ZmYyZDAtNzYyMmJlYTgtMmYxYTdkYjY=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-10-24T16:28:56.068339Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtgb8aynja438ehqyymxh, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OWU1OTNmODktNGY1ZmYyZDAtNzYyMmJlYTgtMmYxYTdkYjY=, PoolId: default}. Compute actor has finished execution: [9:7564826129860996908:2614] 2025-10-24T16:28:56.068506Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtgb8aynja438ehqyymxh, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OWU1OTNmODktNGY1ZmYyZDAtNzYyMmJlYTgtMmYxYTdkYjY=, PoolId: default}. Compute actor has finished execution: [9:7564826129860996909:2615] 2025-10-24T16:28:56.068697Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtgb8aynja438ehqyymxh", Forwarded response to sender actor, requestId: 34, sender: [9:7564826129860996885:2609], selfId: [9:7564826121271060580:2187], source: [9:7564826129860996886:2610] 2025-10-24T16:28:56.068935Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=OWU1OTNmODktNGY1ZmYyZDAtNzYyMmJlYTgtMmYxYTdkYjY=, workerId: [9:7564826129860996886:2610], local sessions count: 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::AlterReplicationConfig [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:122:2058] recipient: [1:116:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:122:2058] recipient: [1:116:2146] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:135:2058] recipient: [1:115:2145] Leader for TabletID 72057594046316545 is [1:137:2159] sender: [1:139:2058] recipient: [1:116:2146] 2025-10-24T16:28:42.125829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:42.125859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:42.125864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:42.125872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:42.125879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:42.125884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-10-24T16:28:42.125894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:42.125907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:42.126064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:42.126135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:42.150448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:28:42.150519Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:42.150636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2157] sender: [1:188:2058] recipient: [1:15:2062] 2025-10-24T16:28:42.153762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:42.153838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:42.153894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:42.155156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:42.155228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:42.155362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:42.155473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:42.155890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:42.155939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:42.156263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:42.156273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:42.156306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:42.156314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:42.156321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:42.156339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2216] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2216] Leader for TabletID 72057594037968897 is [1:223:2220] sender: [1:224:2058] recipient: [1:217:2216] 2025-10-24T16:28:42.157807Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:28:42.180741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:42.180854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:42.180931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:42.180938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:42.180988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:42.181001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:42.182272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:42.182331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:42.182400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:42.182412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:42.182418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:42.182423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:42.183008Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:42.183025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:42.183031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:42.184742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:42.184758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:42.184765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:42.184774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:42.185523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:42.185937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:42.185984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:137:2159] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:42.186232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:42.186261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 Tab ... 
Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-10-24T16:28:56.008982Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:56.008993Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 341 RawX2: 219043334422 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-10-24T16:28:56.008999Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:28:56.009007Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 341 RawX2: 219043334422 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-10-24T16:28:56.009022Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.009026Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.009031Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:56.009036Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 129 -> 240 2025-10-24T16:28:56.009057Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:28:56.009090Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:28:56.009110Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:28:56.009114Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:28:56.009706Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.009716Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:28:56.009763Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.009767Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:28:56.009771Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1003:0 
2025-10-24T16:28:56.009785Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [51:341:2326] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1003 at schemeshard: 72057594046678944 2025-10-24T16:28:56.009844Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [51:131:2155], Recipient [51:131:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-10-24T16:28:56.009848Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-10-24T16:28:56.009854Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.009860Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-10-24T16:28:56.009872Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:28:56.009877Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-10-24T16:28:56.009884Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-10-24T16:28:56.009889Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-10-24T16:28:56.009892Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-10-24T16:28:56.009897Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-10-24T16:28:56.009906Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [51:415:2386] message: TxId: 1003 2025-10-24T16:28:56.009911Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-10-24T16:28:56.009917Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-10-24T16:28:56.009921Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1003:0 2025-10-24T16:28:56.009944Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:28:56.010602Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [51:455:2426], Recipient [51:131:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:28:56.010613Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:28:56.010617Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 2025-10-24T16:28:56.010625Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:28:56.010639Z node 51 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:641: Send to actor: [51:415:2386] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-10-24T16:28:56.010663Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:28:56.010670Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:420:2391] 2025-10-24T16:28:56.010693Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [51:423:2394], Recipient [51:131:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:28:56.010697Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:28:56.010700Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-10-24T16:28:56.010768Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [51:467:2438], Recipient [51:131:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:28:56.010773Z node 51 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:28:56.010785Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:56.010846Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 56us result status StatusSuccess 2025-10-24T16:28:56.010966Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 
0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 63381, MsgBus: 11231 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f2d/r3tmp/tmpvnIq78/pdisk_1.dat 2025-10-24T16:28:34.881448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:34.881489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:34.886420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63381, node 1 2025-10-24T16:28:34.936237Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:34.940616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:28:34.940660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:34.955006Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826032628194188:2081] 1761323314665417 != 1761323314665420 2025-10-24T16:28:34.955360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:34.955362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:34.955364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:34.955409Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11231 2025-10-24T16:28:35.108398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:35.152395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:35.153702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:35.154300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:35.154550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:35.155108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323315199, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:35.155301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715657:0 2025-10-24T16:28:35.155311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-10-24T16:28:35.155346Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826032628194727:2256] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 3 2025-10-24T16:28:35.155391Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826032628194159:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.155428Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826032628194162:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.155496Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7564826032628194669:2220][/Root] Path was updated to new version: owner# [1:7564826032628194375:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.155568Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826032628194727:2256] Ack update: ack to# [1:7564826032628194568:2162], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:35.155608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2025-10-24T16:28:35.155676Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826032628194156:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.156034Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826032628194748:2288][/Root] Path was updated to new version: owner# [1:7564826032628194741:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } waiting... 
2025-10-24T16:28:35.156085Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826032628194747:2287][/Root] Path was updated to new version: owner# [1:7564826032628194740:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.166411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:35.602272Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:35.602927Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f2d/r3tmp/spilling-tmp-runner/node_1_1aa5ec42-da9e41f3-e4c6e3fd-2fb6962a, actor: [1:7564826036923162127:2302] 2025-10-24T16:28:35.603021Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f2d/r3tmp/spilling-tmp-runner E1024 16:28:35.603799653 374392 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.603894640 374392 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.605764Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsvz869gwet5mcc86yg02", Request has 18444982750393.945862s seconds to be completed 2025-10-24T16:28:35.606531Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsvz869gwet5mcc86yg02", Created new session, sessionId: ydb://session/3?node_id=1&id=MzYwZTc5MGYtYjgxOWUyMTktMTJmZmUyNTMtNjRhYTUxYTY=, workerId: [1:7564826036923162146:2320], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:35.606588Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsvz869gwet5mcc86yg02 2025-10-24T16:28:35.606610Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:35.606619Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 
2025-10-24T16:28:35.606628Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:35.607203400 374393 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.607253723 374393 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.607622312 374393 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.607645058 374393 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.608104Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826036923162149:2302][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7564826032628194375:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1024 16:28:35.608318485 374392 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.608350319 374392 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.608130Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826036923162150:2303][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7564826032628194375:2115], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.611125Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826036923162166:2305][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state ... catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-10-24T16:28:56.041191Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826130089841800:2610] TxId: 281474976710691. Ctx: { TraceId: 01k8bgtgad0kzkx9zr2sq8hmm9, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MWU5OWZjZjYtZmMxYzM3YzUtZThhZjgyM2ItYTMxMmVhNGY=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:56.042442Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtgad0kzkx9zr2sq8hmm9, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MWU5OWZjZjYtZmMxYzM3YzUtZThhZjgyM2ItYTMxMmVhNGY=, PoolId: default}. Compute actor has finished execution: [9:7564826130089841804:2614] 2025-10-24T16:28:56.042543Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710691. Ctx: { TraceId: 01k8bgtgad0kzkx9zr2sq8hmm9, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MWU5OWZjZjYtZmMxYzM3YzUtZThhZjgyM2ItYTMxMmVhNGY=, PoolId: default}. 
Compute actor has finished execution: [9:7564826130089841805:2615] 2025-10-24T16:28:56.042675Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgtgad0kzkx9zr2sq8hmm9", Forwarded response to sender actor, requestId: 34, sender: [9:7564826130089841781:2609], selfId: [9:7564826121499905494:2204], source: [9:7564826130089841782:2610] 2025-10-24T16:28:56.042811Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=MWU5OWZjZjYtZmMxYzM3YzUtZThhZjgyM2ItYTMxMmVhNGY=, workerId: [9:7564826130089841782:2610], local sessions count: 0 >> TRestoreTests::ExportImportOnSupportedDatatypes [GOOD] >> TRestoreTests::ExportImportDecimalKey >> TRestoreTests::ExportImportWithMetadataCorruption [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumCorruption >> TImportTests::IgnoreBasicSchemeLimits [GOOD] >> TImportTests::CorruptedTopicImport >> TRestoreTests::ShouldFailOnOutboundKey[Raw] [GOOD] >> TRestoreTests::ShouldFailOnOutboundKey[Zstd] >> TImportTests::ChangefeedsWithPartitioning [GOOD] >> TImportTests::ChangefeedsExportRestore >> TRestoreTests::ExportImportWithDataChecksumAbsence[Zstd] [GOOD] >> TImportTests::ShouldRestoreAttributes [GOOD] >> TImportTests::ShouldRestoreIncrementalBackupFlag >> TImportTests::CancelUponCreatingTableShouldSucceed >> TImportTests::CorruptedTopicImport [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumCorruption [GOOD] >> TRestoreTests::ExportImportWithPermissionsChecksumAbsence >> TRestoreTests::ShouldFailOnOutboundKey[Zstd] [GOOD] >> TRestoreTests::ShouldFailOnVariousErrors >> TRestoreTests::ShouldRestoreSpecialFpValues [GOOD] >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Zstd] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningMerge >> TRestoreTests::ExportImportDecimalKey [GOOD] >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Zstd] >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> TImportTests::CancelUponCreatingTableShouldSucceed [GOOD] >> TImportTests::CancelUponBuildingIndicesShouldSucceed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithDataChecksumAbsence[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:55.196234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:55.196260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:55.196266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:55.196275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:55.196282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2025-10-24T16:28:55.196287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:55.196297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:55.196324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:55.196450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:55.196513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:55.210606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:55.210626Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:55.213224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:55.213252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:55.213283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:55.214687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:55.214894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:55.215019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:55.215110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:55.216082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:55.216126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:55.216372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:55.216380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:55.216414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:55.216422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:55.216429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:55.216455Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.217779Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:55.240889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:55.240989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.241067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:55.241077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:55.241155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:55.241173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:55.242082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:55.242132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:55.242211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.242224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:55.242231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:55.242237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:55.242785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.242798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:55.242807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for 
txid 1:0 3 -> 128 2025-10-24T16:28:55.243227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.243239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:55.243245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:55.243253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:55.243993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:55.244495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:55.244555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:55.244817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:55.244846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:55.244855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:55.244933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:55.244941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:55.244988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:55.245003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:55.245530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:55.245541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme ... 72057594046678944 2025-10-24T16:28:56.971197Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.971298Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-10-24T16:28:56.971313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-10-24T16:28:56.971319Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-10-24T16:28:56.971325Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-10-24T16:28:56.971332Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-10-24T16:28:56.971381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2025-10-24T16:28:56.971441Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-10-24T16:28:56.971452Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:281474976710765] HeadObject: key# /data_00.csv.zst REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:29912 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E33A2EE9-0A56-429A-8B1F-98A1AE180F5D amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 20 2025-10-24T16:28:56.971987Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f ContentLength: 20 } } 2025-10-24T16:28:56.972362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2025-10-24T16:28:56.993371Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710765 2025-10-24T16:28:57.005558Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:57.005577Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710765] Process download info at 'DownloadInfo': info# { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:57.005595Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710765] GetObject: key# /data_00.csv.zst, range# 0-19 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:29912 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8F9B77DC-0424-4FD5-89CC-F0A74ED85E8F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-19 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 20 2025-10-24T16:28:57.006552Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: daa167f46d135bcc1fbc9f42f80e496f Body: 20b } 2025-10-24T16:28:57.006566Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710765] Content size: processed-bytes# 0, content-length# 20, body-size# 20 2025-10-24T16:28:57.006647Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710765] Upload rows: count# 1, size# 36 2025-10-24T16:28:57.007246Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409549 Status: 0 Info: { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 20 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 
2025-10-24T16:28:57.007261Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710765] Process download info at 'UploadResponse': info# { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 20 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:57.007266Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710765] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-10-24T16:28:57.019918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 914 RawX2: 12884904726 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:57.019943Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-10-24T16:28:57.019979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 914 RawX2: 12884904726 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:57.019999Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 914 RawX2: 12884904726 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:57.020017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.020023Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.020029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-10-24T16:28:57.020038Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710765:0 129 -> 240 2025-10-24T16:28:57.020089Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:57.020710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.020758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.020768Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-10-24T16:28:57.020786Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-10-24T16:28:57.020792Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-10-24T16:28:57.020801Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-10-24T16:28:57.020805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-10-24T16:28:57.020810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-10-24T16:28:57.020850Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710765 2025-10-24T16:28:57.020860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-10-24T16:28:57.020870Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710765:0 2025-10-24T16:28:57.020875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710765:0 2025-10-24T16:28:57.020911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-10-24T16:28:57.021411Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-10-24T16:28:57.021428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710765 2025-10-24T16:28:57.021440Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:57.021447Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710765 2025-10-24T16:28:57.021883Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:57.021906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.021914Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:876:2806] TestWaitNotification: OK eventTxId 104 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::CorruptedTopicImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:55.976761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:55.976788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:55.976794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:55.976801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:55.976807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:55.976812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:55.976822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:55.976854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:55.976992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:55.977059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:55.993721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:55.993743Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:55.997024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:55.997055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:55.997084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:55.998408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:55.998643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:55.998751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:55.998825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.000599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.000650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.000908Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.000918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.000960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.000968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.000974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.001002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.002421Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.015694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:56.015774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.015828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.015837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:56.015920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.015931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.016460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.016498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.016545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.016552Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.016556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.016560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.016909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.016921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.016929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.017278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.017288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.017294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.017301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.017924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.018338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.018386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.018578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.018602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.018610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.018673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 
2025-10-24T16:28:56.018680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.018727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.018739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.019170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.019178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... metadata.json HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AEB9B896-E16C-403D-9320-D9027F5DC50D amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/metadata.json / 14 2025-10-24T16:28:57.133463Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:334: HandleMetadata TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# HeadObjectResult { ETag: fce8ed52ad0bc010cd9a8dd97f615e98 ContentLength: 14 } REQUEST: GET /Topic_0/metadata.json HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B7D9C9E7-6019-4595-B834-64A6BACD9AEA amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/metadata.json / 14 2025-10-24T16:28:57.133969Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:433: HandleMetadata TEvExternalStorage::TEvGetObjectResponse: self# [3:290:2161], result# fce8ed52ad0bc010cd9a8dd97f615e98 2025-10-24T16:28:57.133976Z node 3 :IMPORT TRACE: schemeshard_import_getters.cpp:449: Trying to parse metadata: self# [3:290:2161], body# {"version": 1} REQUEST: HEAD /Topic_0/metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 891D2B51-2173-4D95-8E78-0B7DDDDA5C59 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/metadata.json.sha256 / 64 2025-10-24T16:28:57.138062Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:215: HandleChecksum TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# HeadObjectResult { ETag: a229a38155565a4ea4490a883e3d659d ContentLength: 64 } REQUEST: GET /Topic_0/metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5AF6C4BE-F239-4832-A1B3-C2EDE56DBA1E amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-63 user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/metadata.json.sha256 / 64 2025-10-24T16:28:57.139132Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:230: HandleChecksum TEvExternalStorage::TEvGetObjectResponse: self# [3:290:2161], result# a229a38155565a4ea4490a883e3d659d REQUEST: HEAD /Topic_0/scheme.pb HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 565EDCB0-3783-4B41-ADEE-12DE4FEA14C3 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:28:57.143932Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:348: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# No response body. REQUEST: HEAD /Topic_0/create_view.sql HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 14BA32C1-9D25-4415-92DD-AC424F75F952 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:28:57.144477Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:348: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# No response body. REQUEST: HEAD /Topic_0/create_topic.pb HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1F917D75-53AC-4D2B-867D-1CF40BE4B16F amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/create_topic.pb / 732 2025-10-24T16:28:57.145128Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:348: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# HeadObjectResult { ETag: 456e940d62d8f48fce4930e371868262 ContentLength: 732 } REQUEST: GET /Topic_0/create_topic.pb HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4AE18CCC-B3B4-4C51-9CA6-CD943415C2CC amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-731 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/create_topic.pb / 732 2025-10-24T16:28:57.145667Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:477: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [3:290:2161], result# 456e940d62d8f48fce4930e371868262 2025-10-24T16:28:57.145685Z node 3 :IMPORT TRACE: schemeshard_import_getters.cpp:495: Trying to parse scheme: self# [3:290:2161], itemIdx# 0, schemeKey# /Topic_0/create_topic.pb, body# partitioning_settings {\n min_active_partitions: 1\n max_active_partitions: 1\n auto_partitioning_settings {\n strategy: AUTO_PARTITIONING_STRATEGY_DISABLED\n partition_write_speed {\n stabilization_window {\n seconds: 300\n }\n up_utilization_percent: 80\n down_utilization_percent: 20\n }\n }\n}\nretention_period {\n seconds: -1\n}\nsupported_codecs {\n}\npartition_write_speed_bytes_per_second: 50000000\npartition_write_burst_bytes: 50000000\nconsumers {\n 
name: "Consumer_0"\n read_from {\n }\n attributes {\n key: "_service_type"\n value: "data-streams"\n }\n}\nconsumers {\n name: "Consumer_1"\n important: true\n read_from {\n }\n attributes {\n key: "_service_type"\n value: "data-streams"\n }\n}\n REQUEST: HEAD /Topic_0/create_topic.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 65503AEE-4DA5-48F2-8250-DB5BA295E93C amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/create_topic.pb.sha256 / 64 2025-10-24T16:28:57.150120Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:215: HandleChecksum TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# HeadObjectResult { ETag: 658c18dfe5698f886562b78a53cb7b5f ContentLength: 64 } REQUEST: GET /Topic_0/create_topic.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: ACDF31C3-2298-4AB2-991A-D3393F1BF7E3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-63 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /Topic_0/create_topic.pb.sha256 / 64 2025-10-24T16:28:57.150787Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:230: HandleChecksum TEvExternalStorage::TEvGetObjectResponse: self# [3:290:2161], result# 658c18dfe5698f886562b78a53cb7b5f REQUEST: HEAD /Topic_0/permissions.pb HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7EDFFC3F-8A23-4927-AF6D-E2167CE51F83 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:28:57.157269Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:379: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [3:290:2161], result# No response body. 
REQUEST: GET /?prefix=%2FTopic_0%2F HTTP/1.1 HEADERS: Host: localhost:4109 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7A23BFCC-0DB9-4F02-846D-259A0C34FE13 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeList: /Topic_0/ 2025-10-24T16:28:57.164071Z node 3 :IMPORT DEBUG: schemeshard_import_getters.cpp:669: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [3:290:2161], result# ListObjectsResult { } 2025-10-24T16:28:57.164098Z node 3 :IMPORT INFO: schemeshard_import_getters.cpp:694: Reply: self# [3:290:2161], success# 1, error# 2025-10-24T16:28:57.164141Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:57.164149Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1000: TImport::TTxProgress: OnSchemeResult: id# 101, itemIdx# 0, success# 1 2025-10-24T16:28:57.164223Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:640: TImport::TTxProgress: Allocate txId: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: '' } 2025-10-24T16:28:57.167883Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:57.167931Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 101, at schemeshard: 72057594046678944 2025-10-24T16:28:57.167944Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:57.167951Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1171: TImport::TTxProgress: OnAllocateResult: txId# 281474976710757, id# 101 2025-10-24T16:28:57.167971Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:449: TImport::TTxProgress: CreateTopic propose: info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: '' }, txId# 281474976710757 2025-10-24T16:28:57.168041Z node 3 :IMPORT NOTICE: schemeshard_import__create.cpp:764: TImport::TTxProgress: creation topic failed, cancelling, info# { Id: 101 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored/Topic_0' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: /Topic_0 Issue: 'retention_period must be positive, provided seconds: -1 ' } 2025-10-24T16:28:57.168523Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:57.168549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.168557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2280] TestWaitNotification: OK eventTxId 101 >> 
TImportTests::ShouldRestoreIncrementalBackupFlag [GOOD] >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Zstd] [GOOD] >> TImportTests::ChangefeedTablePrefixConflict ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 21048, MsgBus: 23545 2025-10-24T16:27:54.285016Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825863060751806:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.285033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020cd/r3tmp/tmpw4LFYt/pdisk_1.dat 2025-10-24T16:27:54.315749Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.323479Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.323658Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825863060751775:2081] 1761323274284751 != 1761323274284754 TServer::EnableGrpc on GrpcPort 21048, node 1 2025-10-24T16:27:54.335427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:54.335439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:54.335444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:54.335482Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23545 TClient is connected to server localhost:23545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:27:54.388842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.388863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.389984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:54.393668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:54.402749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:54.529875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:27:54.605881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863060752800:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.605884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863060752789:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.605902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.605985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825863060752803:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.605990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:54.606749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:27:54.609036Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564825863060752804:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-10-24T16:27:54.699363Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564825863060752856:2561] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:27:54.768580Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976715661;task_id=2;memory=1048576; 2025-10-24T16:27:54.768601Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976715661, task: 2. [Mem] memory 1048576 NOT granted 2025-10-24T16:27:54.770706Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:678: SelfId: [1:7564825863060752898:2357], TxId: 281474976715661, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgrmbc2gcj5hae28vtzwxh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDU1MDBlYzctMjg0M2NhZDUtOGZhODdlY2ItN2I3MDZhZTg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-aakgzge3ui, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-10-24T16:27:54.767880Z }, code: 2029 }. 2025-10-24T16:27:54.770839Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [1:7564825863060752897:2356], TxId: 281474976715661, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgrmbc2gcj5hae28vtzwxh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDU1MDBlYzctMjg0M2NhZDUtOGZhODdlY2ItN2I3MDZhZTg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7564825863060752886:2340], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-10-24T16:27:54.771696Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=1&id=ZDU1MDBlYzctMjg0M2NhZDUtOGZhODdlY2ItN2I3MDZhZTg=, ActorId: [1:7564825863060752770:2340], ActorState: ExecuteState, TraceId: 01k8bgrmbc2gcj5hae28vtzwxh, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-aakgzge3ui, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976715661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-10-24T16:27:54.767880Z } , code: 2029 Trying to start YDB, gRPC: 1095, MsgBus: 11443 2025-10-24T16:27:54.977833Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564825860827193457:2148];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:54.977989Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020cd/r3tmp/tmptBVy52/pdisk_1.dat 2025-10-24T16:27:54.980405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:54.990935Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:54.991296Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564825860827193337:2081] 1761323274977225 != 1761323274977228 2025-10-24T16:27:54.994188Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:54.994215Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:54.995255Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1095, node 2 2025-10-24T16:27:55.001535Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:55.001547Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:55.00 ... =YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsnw4e76nyhjkxfethedd, Create QueryResponse for error on request, msg: 2025-10-24T16:28:29.104034Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [5:7564826012008924758:2512] TxId: 281474976715720. Ctx: { TraceId: 01k8bgsnze32egy2yv0r06zr19, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 64ms } {
: Error: Cancelling after 64ms during execution } ] 2025-10-24T16:28:29.104119Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsnze32egy2yv0r06zr19, Create QueryResponse for error on request, msg: 2025-10-24T16:28:29.172126Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsp1j8w0qa39a18vh22yp, Create QueryResponse for error on request, msg: 2025-10-24T16:28:29.248054Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsp3r2rsdmsvfhh3385q2, Create QueryResponse for error on request, msg: 2025-10-24T16:28:29.372241Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [5:7564826012008924849:2512] TxId: 281474976715725. Ctx: { TraceId: 01k8bgsp7r053qf0es49qdy2fs, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 68ms } {
: Error: Cancelling after 67ms during execution } ] 2025-10-24T16:28:29.372337Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsp7r053qf0es49qdy2fs, Create QueryResponse for error on request, msg: 2025-10-24T16:28:30.320528Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsq4xazr0vccg6s7zbq6m, Create QueryResponse for error on request, msg: 2025-10-24T16:28:30.439990Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7564825994829051576:2086];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:30.440015Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:28:30.501407Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsqac2swtwng4rngp57a5, Create QueryResponse for error on request, msg: 2025-10-24T16:28:30.776487Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsqk0eyh759fj9k8zcjvd, Create QueryResponse for error on request, msg: 2025-10-24T16:28:30.876369Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsqny8g6vnhtenya62nss, Create QueryResponse for error on request, msg: 2025-10-24T16:28:31.144406Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsqye33wyrvw3f56j9npa, Create QueryResponse for error on request, msg: 2025-10-24T16:28:31.243271Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [5:7564826020598860592:2512] TxId: 281474976715768. Ctx: { TraceId: 01k8bgsr1d54ft1qkssxh6kvnt, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 93ms } {
: Error: Cancelling after 93ms during execution } ] 2025-10-24T16:28:31.243320Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860599:3216], TxId: 281474976715768, task: 4. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243436Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860600:3217], TxId: 281474976715768, task: 5. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243475Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860601:3218], TxId: 281474976715768, task: 6. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243537Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860602:3219], TxId: 281474976715768, task: 7. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243574Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860603:3220], TxId: 281474976715768, task: 8. Ctx: { TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243610Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860596:3213], TxId: 281474976715768, task: 1. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243647Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860597:3214], TxId: 281474976715768, task: 2. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243681Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860598:3215], TxId: 281474976715768, task: 3. Ctx: { TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243715Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7564826020598860604:3221], TxId: 281474976715768, task: 9. Ctx: { CheckpointId : . TraceId : 01k8bgsr1d54ft1qkssxh6kvnt. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7564826020598860592:2512], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-10-24T16:28:31.243868Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsr1d54ft1qkssxh6kvnt, Create QueryResponse for error on request, msg: 2025-10-24T16:28:34.280058Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [5:7564826033483765058:2512] TxId: 281474976715862. Ctx: { TraceId: 01k8bgstypbj6twjv97dmfaa29, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 140ms } {
: Error: Cancelling after 145ms during execution } ] 2025-10-24T16:28:34.280148Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgstypbj6twjv97dmfaa29, Create QueryResponse for error on request, msg: 2025-10-24T16:28:34.436027Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsv3g798xve0v6cfsdb5r, Create QueryResponse for error on request, msg: 2025-10-24T16:28:34.852269Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzVmMDQ0MDktY2JmZmMzMjktODNkZGM4YWItZDc4ODc4YTU=, ActorId: [5:7564825999124021636:2512], ActorState: ExecuteState, TraceId: 01k8bgsvgm3bny7d5mecjnct0c, Create QueryResponse for error on request, msg: 2025-10-24T16:28:40.475912Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:40.475951Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> TImportTests::UidAsIdempotencyKey >> TImportTests::ShouldRestoreTtlSettingsInDateTypeColumnMode >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Raw] >> TRestoreTests::ShouldFailOnVariousErrors [GOOD] >> TRestoreTests::ShouldSucceedWithDefaultFromLiteral[Zstd] [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> TRestoreTests::ZeroLengthEncryptedFileTreatedAsCorrupted >> TImportTests::ShouldRestoreTtlSettingsInValueSinceUnixEpochMode >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] >> TImportTests::CancelUponBuildingIndicesShouldSucceed [GOOD] >> TImportTests::AuditCompletedImport ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreIncrementalBackupFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:56.291185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:56.291208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.291214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:56.291222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:56.291229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:56.291234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-10-24T16:28:56.291244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.291258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:56.291388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:56.291466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:56.305782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:56.305803Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:56.308237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:56.308316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:56.308350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:56.310914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:56.310988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:56.311094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.311172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.311913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.311958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.312263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.312284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.312313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.312320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.312327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.312344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.313789Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2154] sender: [1:245:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.333551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:56.333621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.333680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.333689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:56.333752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.333769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.334345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.334387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.334438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.334449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.334455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.334460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.334865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.334878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.334883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.335263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:28:56.335272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.335277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.335282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.335760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.336267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.336321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.336517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.336547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.336556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.336634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:56.336641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.336687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.336701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.337163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.337171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
eshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot/User, domainPathId# [OwnerId: 72075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:28:57.443464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:57.443567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:57.443579Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-10-24T16:28:57.443597Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-10-24T16:28:57.443602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:57.443608Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-10-24T16:28:57.443612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:57.443617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-10-24T16:28:57.443635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:357:2336] message: TxId: 281474976720758 2025-10-24T16:28:57.443643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:57.443649Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-10-24T16:28:57.443654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976720758:0 2025-10-24T16:28:57.443682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-10-24T16:28:57.444178Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-10-24T16:28:57.444196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976720758 2025-10-24T16:28:57.444208Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:57.444214Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-10-24T16:28:57.444650Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:57.444670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.444677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [3:507:2456] TestWaitNotification: OK eventTxId 103 2025-10-24T16:28:57.445266Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:57.445322Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 66us result status StatusSuccess 2025-10-24T16:28:57.445440Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:57.445521Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:28:57.445577Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 57us result status StatusSuccess 2025-10-24T16:28:57.445790Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelUponUploadResponseShouldSucceed[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:56.145864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:56.145887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.145893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:56.145899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:56.145905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:56.145910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:56.145919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.145943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:56.146065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:56.146122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:56.160249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:56.160274Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:56.163490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:56.163521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:56.163541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:56.165777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:56.166040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:56.166152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.166231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.167341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.167380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.167598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.167608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.167639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.167647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.167655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.167676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.169094Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.192948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-10-24T16:28:56.193023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.193076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.193083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:56.193142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.193157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.193788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.193842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.193894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.193903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.193908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.193913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.194324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.194338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.194343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.194702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.194717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.194722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.194728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.195376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.195796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.195863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.196012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.196034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.196038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.196087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:56.196093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.196123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.196134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.196536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.196543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-10-24T16:28:57.562800Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } 2025-10-24T16:28:57.563411Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:28:57.574123Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:57.574145Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:57.574163Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-22 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:10172 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 61A163B9-915C-45CF-A056-E5C22395C912 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-10-24T16:28:57.575207Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-10-24T16:28:57.575222Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-10-24T16:28:57.575258Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-10-24T16:28:57.586099Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:57.586119Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:57.586137Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-10-24T16:28:57.586201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 102 TxId: 103 2025-10-24T16:28:57.586208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 103:0, target opId# 102:0 2025-10-24T16:28:57.587708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-10-24T16:28:57.587740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.587747Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:57.587754Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 133 2025-10-24T16:28:57.588189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.588204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 102:0 ProgressState at tablet72057594046678944 2025-10-24T16:28:57.588209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.588637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:28:57.588738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:28:57.588746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:28:57.588762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:28:57.588765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:28:57.588834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:28:57.588841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:28:57.588846Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:28:57.588868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:28:57.588881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.588886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:439:2407] TestWaitNotification: OK eventTxId 103 2025-10-24T16:28:57.602029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:57.602052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:57.602072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:57.602082Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:28:57.602093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.602096Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.602100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:57.602104Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 133 -> 240 2025-10-24T16:28:57.602136Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:57.602587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.602677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.602684Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:57.602695Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:57.602699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:57.602703Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:57.602708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:57.602711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:28:57.602722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:28:57.602727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:57.602731Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:57.602734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:57.602753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:57.603186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.603201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:439:2407] TestWaitNotification: OK eventTxId 102 >> TImportTests::ShouldRestorePartitioningBySize >> TRestoreTests::ExportImportWithPermissionsChecksumAbsence [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnVariousErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:56.602996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:56.603024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.603030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:56.603037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:56.603045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:56.603049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:56.603058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.603086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-10-24T16:28:56.603209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:56.603271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:56.616604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:56.616625Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:56.619561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:56.619590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:56.619611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:56.621067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:56.621299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:56.621379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.621442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.622335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.622365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.622530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.622539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.622565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.622571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.622575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.622592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.623699Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.642535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:56.642588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.642629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.642636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:56.642699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.642714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.643306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.643336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.643387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.643397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.643402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.643407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.643756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.643767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.643771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.644239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.644250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.644256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.644263Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.644910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.645333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.645364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.645533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.645562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.645569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.645630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:56.645638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.645674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.645686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.646194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.646205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
628: TTxOperationReply execute, operationId: 111:2, at schemeshard: 72057594046678944, message: Source { RawX1: 589 RawX2: 12884904440 } Origin: 72075186233409547 State: 2 TxId: 111 Step: 0 Generation: 2 2025-10-24T16:28:57.723333Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 111:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:28:57.723342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 111:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 589 RawX2: 12884904440 } Origin: 72075186233409547 State: 2 TxId: 111 Step: 0 Generation: 2 2025-10-24T16:28:57.723352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 111:2, shardIdx: 72057594046678944:3, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.723357Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 111:2, at schemeshard: 72057594046678944 2025-10-24T16:28:57.723362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 111:2, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-10-24T16:28:57.723368Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 111:2 129 -> 240 2025-10-24T16:28:57.723457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 596 RawX2: 12884904445 } Origin: 72075186233409548 State: 2 TxId: 111 Step: 0 Generation: 2 2025-10-24T16:28:57.723462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 111, tablet: 72075186233409548, partId: 0 2025-10-24T16:28:57.723469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 111:0, at schemeshard: 72057594046678944, message: Source { RawX1: 596 RawX2: 12884904445 } Origin: 72075186233409548 State: 2 TxId: 111 Step: 0 Generation: 2 2025-10-24T16:28:57.723475Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 111:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-10-24T16:28:57.723481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 111:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 596 RawX2: 12884904445 } Origin: 72075186233409548 State: 2 TxId: 111 Step: 0 Generation: 2 2025-10-24T16:28:57.723488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 111:0, shardIdx: 72057594046678944:2, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.723491Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 111:0, at schemeshard: 
72057594046678944 2025-10-24T16:28:57.723495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 111:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-10-24T16:28:57.723499Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 111:0 129 -> 240 2025-10-24T16:28:57.723723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-10-24T16:28:57.723739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-10-24T16:28:57.724644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-10-24T16:28:57.724675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 111 2025-10-24T16:28:57.724691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:2, at schemeshard: 72057594046678944 2025-10-24T16:28:57.724711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.724724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:2, at schemeshard: 72057594046678944 2025-10-24T16:28:57.724778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 111:2, at schemeshard: 72057594046678944 2025-10-24T16:28:57.724786Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 111:2 ProgressState 2025-10-24T16:28:57.724799Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:2 progress is 2/3 2025-10-24T16:28:57.724804Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 2/3 2025-10-24T16:28:57.724809Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:2 progress is 2/3 2025-10-24T16:28:57.724812Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 2/3 2025-10-24T16:28:57.724820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 111, ready parts: 2/3, is published: true 2025-10-24T16:28:57.724853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 111:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.724901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 111:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.724907Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 111:0 ProgressState 2025-10-24T16:28:57.724917Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:0 progress is 3/3 2025-10-24T16:28:57.724920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 111 ready parts: 3/3 2025-10-24T16:28:57.724925Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#111:0 progress is 3/3 2025-10-24T16:28:57.724929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-10-24T16:28:57.724933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 111, ready parts: 3/3, is published: true 2025-10-24T16:28:57.724946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:348:2325] message: TxId: 111 2025-10-24T16:28:57.724952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-10-24T16:28:57.724959Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 111:0 2025-10-24T16:28:57.724964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 111:0 2025-10-24T16:28:57.724984Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-10-24T16:28:57.724990Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 111:1 2025-10-24T16:28:57.724994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 111:1 2025-10-24T16:28:57.725000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:28:57.725004Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 111:2 2025-10-24T16:28:57.725008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 111:2 2025-10-24T16:28:57.725014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-10-24T16:28:57.725538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.725553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [3:649:2596] TestWaitNotification: OK eventTxId 111 TestModificationResults wait txId: 114 2025-10-24T16:28:57.726376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRestore Restore { TableName: "IndexedTable" S3Settings { Endpoint: "localhost" Scheme: HTTP } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:57.726420Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_backup_restore_common.h:586: TRestore Propose, path: /MyRoot/IndexedTable, opId: 114:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.726442Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose status:StatusInvalidParameter, reason: Check failed: path: '/MyRoot/IndexedTable', error: path has indexes, request doesn't 
accept it, source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:612, at schemeshard: 72057594046678944 2025-10-24T16:28:57.726947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "Check failed: path: \'/MyRoot/IndexedTable\', error: path has indexes, request doesn\'t accept it, source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:612" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:57.726991Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Check failed: path: '/MyRoot/IndexedTable', error: path has indexes, request doesn't accept it, source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_restore_common.h:612, operation: RESTORE TABLE, path: /MyRoot/IndexedTable TestModificationResult got TxId: 114, wait until txId: 114 >> TImportTests::ChangefeedsExportRestore [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> TImportTests::ChangefeedsExportRestoreUnhappyPropose Test command err: Trying to start YDB, gRPC: 14968, MsgBus: 64284 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/001f22/r3tmp/tmprCOe9F/pdisk_1.dat 2025-10-24T16:28:35.058384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:35.063606Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826033923501047:2081] 1761323314964188 != 1761323314964191 2025-10-24T16:28:35.065457Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14968, node 1 2025-10-24T16:28:35.080086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:35.080097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:35.080100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:35.080149Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:35.088892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:35.088924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:35.089591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64284 TClient is connected to server localhost:64284 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:35.207171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-10-24T16:28:35.208440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:28:35.208764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:35.208981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:28:35.209510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323315255, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:28:35.209716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976715657:0 2025-10-24T16:28:35.209726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-10-24T16:28:35.209762Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7564826038218468853:2240] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 3 2025-10-24T16:28:35.209847Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826033923501015:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.209893Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826033923501018:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.209906Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7564826033923501021:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-10-24T16:28:35.209955Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7564826038218468865:2251][/Root] Path was updated to new version: owner# [1:7564826033923501132:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.210037Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7564826038218468853:2240] Ack update: ack to# [1:7564826033923501394:2146], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 3 2025-10-24T16:28:35.210078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2025-10-24T16:28:35.210448Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826038218468917:2288][/Root] Path was updated to new version: owner# [1:7564826038218468910:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.210489Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7564826038218468916:2287][/Root] Path was updated to new version: owner# [1:7564826038218468909:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 3) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } waiting... 
2025-10-24T16:28:35.210928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-10-24T16:28:35.449474Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 E1024 16:28:35.452147792 374573 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.452227259 374573 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.450176Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/fxx3/001f22/r3tmp/spilling-tmp-runner/node_1_3a77f376-e3dbc470-1a0045c0-2645ade5, actor: [1:7564826038218468985:2302] 2025-10-24T16:28:35.450281Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/fxx3/001f22/r3tmp/spilling-tmp-runner 2025-10-24T16:28:35.453474Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: TraceId: "01k8bgsw0c4rrrrvr2v9722dce", Request has 18444982750394.098150s seconds to be completed 2025-10-24T16:28:35.454214Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: TraceId: "01k8bgsw0c4rrrrvr2v9722dce", Created new session, sessionId: ydb://session/3?node_id=1&id=ZGRmZGRiNWItZTk4OTdmYTctYmRlODkxNGItYmEwMGFmZGE=, workerId: [1:7564826038218469002:2318], database: /Root, longSession: 1, local sessions count: 1 E1024 16:28:35.454780447 374572 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.454832981 374572 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.455198459 374572 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.455220051 374572 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1024 16:28:35.456116980 374573 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.456140711 374573 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.454264Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 01k8bgsw0c4rrrrvr2v9722dce 2025-10-24T16:28:35.454301Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:436: Subscribed for config changes. 2025-10-24T16:28:35.454307Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:443: Updated table service config. 2025-10-24T16:28:35.454315Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1519: Updated YQL logs priority to current level: 5 2025-10-24T16:28:35.455317Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826038218469005:2299][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7564826033923501132:2117], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.459435Z node 1 :STREAMS_STORAGE_SERVICE ERROR: storage_proxy.cpp:170: Failed to init checkpoint storage: {
: Error: GRpc error: (2): Failed to create secure client channel } E1024 16:28:35.459652366 374573 dns_resolver_ares.cc:452] no server name supplied in dns URI E1024 16:28:35.459681794 374573 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-10-24T16:28:35.460127Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826038218469017:2301][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7564826033923501132:2117], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-10-24T16:28:35.460151Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7564826038218469018:2302][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7564826033923501132:2117] ... time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. 
GRpcStatusCode: 0 2025-10-24T16:28:57.713103Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2678: ActorId: [9:7564826133024256313:2682] TxId: 281474976715698. Ctx: { TraceId: 01k8bgthyhb50rva67m0103tk2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTZkZTFlNjEtY2U1MzI3MzEtYmYxNGJhM2QtYWJhNWFjNzc=, PoolId: default}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-10-24T16:28:57.714226Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976715698. Ctx: { TraceId: 01k8bgthyhb50rva67m0103tk2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTZkZTFlNjEtY2U1MzI3MzEtYmYxNGJhM2QtYWJhNWFjNzc=, PoolId: default}. Compute actor has finished execution: [9:7564826133024256317:2686] 2025-10-24T16:28:57.714288Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976715698. 
Ctx: { TraceId: 01k8bgthyhb50rva67m0103tk2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTZkZTFlNjEtY2U1MzI3MzEtYmYxNGJhM2QtYWJhNWFjNzc=, PoolId: default}. Compute actor has finished execution: [9:7564826133024256318:2687] 2025-10-24T16:28:57.714441Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:886: TraceId: "01k8bgthyhb50rva67m0103tk2", Forwarded response to sender actor, requestId: 41, sender: [9:7564826133024256294:2681], selfId: [9:7564826124434319805:2200], source: [9:7564826133024256295:2682] 2025-10-24T16:28:57.715536Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Session closed, sessionId: ydb://session/3?node_id=9&id=YTZkZTFlNjEtY2U1MzI3MzEtYmYxNGJhM2QtYWJhNWFjNzc=, workerId: [9:7564826133024256295:2682], local sessions count: 0 2025-10-24T16:28:57.778380Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1323: Request has 18444982750371.773246s seconds to be completed 2025-10-24T16:28:57.778971Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1398: Created new session, sessionId: ydb://session/3?node_id=9&id=ZGNhZTZmYTctZDZkZWE0OWEtZWYwZTlmNDItMzc1N2ZmYzI=, workerId: [9:7564826133024256326:2690], database: /Root, longSession: 1, local sessions count: 1 2025-10-24T16:28:57.779020Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:577: Received create session request, trace_id: 2025-10-24T16:28:57.779161Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:700: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGNhZTZmYTctZDZkZWE0OWEtZWYwZTlmNDItMzc1N2ZmYzI=, PoolId: , DatabaseId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 43, targetId: [9:7564826133024256326:2690] 2025-10-24T16:28:57.779173Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1153: Scheduled timeout timer for requestId: 43 timeout: 300.000000s actor id: [9:7564826133024256328:2984] >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningMerge [GOOD] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningSplit >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink >> TImportTests::UidAsIdempotencyKey [GOOD] >> TImportTests::TopicImport >> TImportTests::ShouldRestoreTtlSettingsInDateTypeColumnMode [GOOD] >> TImportTests::ShouldRestoreStorageSettings >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Raw] [GOOD] >> TRestoreTests::ShouldFailOnEmptyToken[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: 2025-10-24T16:28:45.697816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:45.740718Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:45.740820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:45.740834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00275d/r3tmp/tmpibxc4V/pdisk_1.dat 2025-10-24T16:28:45.828986Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:45.830208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:45.830254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:45.830409Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323325244922 != 1761323325244925 2025-10-24T16:28:45.868384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:45.925499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:45.961215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:46.047382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:46.082182Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:670:2564], Recipient [1:685:2574]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:46.082523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:670:2564], Recipient [1:685:2574]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:46.082616Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:685:2574] 2025-10-24T16:28:46.082679Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:28:46.086773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:671:2565], Recipient [1:688:2576]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:46.099161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:670:2564], Recipient [1:685:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-10-24T16:28:46.099312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:671:2565], Recipient [1:688:2576]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:46.099485Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:688:2576] 2025-10-24T16:28:46.099536Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:28:46.101087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:671:2565], Recipient [1:688:2576]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:46.102608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:28:46.102654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:28:46.102863Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:28:46.102874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:28:46.102882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:28:46.102951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:28:46.102981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:28:46.103000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:717:2574] in generation 1 2025-10-24T16:28:46.103094Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:28:46.103112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:28:46.103231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-10-24T16:28:46.103238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-10-24T16:28:46.103244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-10-24T16:28:46.103271Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:28:46.103284Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:28:46.103291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2576] in generation 1 2025-10-24T16:28:46.117681Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:28:46.125979Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:28:46.126099Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:28:46.126147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-10-24T16:28:46.126154Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:28:46.126160Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-10-24T16:28:46.126166Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:46.126214Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:28:46.126223Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-10-24T16:28:46.126233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:28:46.126241Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-10-24T16:28:46.126245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-10-24T16:28:46.126248Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-10-24T16:28:46.126252Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:28:46.126326Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:685:2574], Recipient [1:685:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.126336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.126347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:688:2576], Recipient [1:688:2576]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.126351Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.126485Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:28:46.126514Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:28:46.126520Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-10-24T16:28:46.126530Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-10-24T16:28:46.126555Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:28:46.126564Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:46.126573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:28:46.126579Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:46.126584Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:46.126591Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:28:46.126597Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:28:46.126605Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-10-24T16:28:46.126609Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:46.126613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-10-24T16:28:46.126617Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-10-24T16:28:46.126620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-10-24T16:28:46.126624Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-10-24T16:28:46.126629Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransa ... e 10 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2025-10-24T16:28:57.790682Z node 10 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [2000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-10-24T16:28:57.790692Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-10-24T16:28:57.790705Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:713:2587], Recipient [10:709:2584]: {TEvReadSet step# 2000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-10-24T16:28:57.790710Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:28:57.790714Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2025-10-24T16:28:57.821366Z node 10 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgtj1z2np33b9hhz3zajxq, Database: , SessionId: ydb://session/3?node_id=10&id=MTlkOTA0NGEtYThhZWY4MjQtYzk0M2MxOTEtMzJiYmI0Yjc=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:28:57.822128Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [10:971:2772], Recipient [10:709:2584]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-10-24T16:28:57.822180Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:28:57.822200Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-10-24T16:28:57.822219Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:28:57.822226Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:28:57.822232Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:57.822237Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:28:57.822250Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-10-24T16:28:57.822257Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:28:57.822261Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:57.822267Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:28:57.822272Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:28:57.822290Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-10-24T16:28:57.822339Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:28:57.822347Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[10:971:2772], 0} after executionsCount# 1 2025-10-24T16:28:57.822355Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[10:971:2772], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:57.822369Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[10:971:2772], 0} finished in read 2025-10-24T16:28:57.822379Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:28:57.822384Z 
node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:28:57.822388Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:28:57.822397Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:28:57.822408Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-10-24T16:28:57.822413Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:28:57.822417Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037888 has finished 2025-10-24T16:28:57.822422Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:28:57.822444Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:28:57.822642Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [10:971:2772], Recipient [10:709:2584]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:28:57.822655Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:28:57.822689Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [10:971:2772], Recipient [10:713:2587]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-10-24T16:28:57.822739Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-10-24T16:28:57.822751Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-10-24T16:28:57.822762Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:57.822766Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-10-24T16:28:57.822771Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:57.822775Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-10-24T16:28:57.822783Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-10-24T16:28:57.822790Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:57.822794Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:57.822799Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add 
[0:5] at 72075186224037889 to execution unit ExecuteRead 2025-10-24T16:28:57.822804Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-10-24T16:28:57.822818Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-10-24T16:28:57.822863Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:28:57.822873Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[10:971:2772], 1} after executionsCount# 1 2025-10-24T16:28:57.822879Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[10:971:2772], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:57.822889Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[10:971:2772], 1} finished in read 2025-10-24T16:28:57.822897Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:57.822901Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-10-24T16:28:57.822906Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-10-24T16:28:57.822910Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-10-24T16:28:57.822917Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037889 is Executed 2025-10-24T16:28:57.822921Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-10-24T16:28:57.822925Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037889 has finished 2025-10-24T16:28:57.822930Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-10-24T16:28:57.822942Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-10-24T16:28:57.823067Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [10:971:2772], Recipient [10:713:2587]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-10-24T16:28:57.823075Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithPermissionsChecksumAbsence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: 
[1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:56.131186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:56.131212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.131218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:56.131225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:56.131232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:56.131236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:56.131245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.131271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:56.131395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:56.131464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:56.144761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:56.144781Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:56.147256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:56.147279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:56.147296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:56.148616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:56.148833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:56.148925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.148987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.150434Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.150470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.150662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.150671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.150701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.150707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.150712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.150731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.152022Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.173554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:56.173618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.173658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.173664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:56.173710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.173720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.174331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.174373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.174427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.174436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.174442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.174447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.174873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.174884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.174894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.175296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.175311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.175317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.175324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.176046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.176484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.176533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.176718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.176743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.176752Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.176815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:56.176823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.176859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.176871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.177505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.177516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-10-24T16:28:57.809963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.809971Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:57.810053Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:28:57.810062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:28:57.810066Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-10-24T16:28:57.810069Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-10-24T16:28:57.810074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:28:57.810086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:13343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C59B76B3-D020-4A4B-ABFE-10D403DC023B amz-sdk-request: attempt=1 content-type: application/xml 
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-10-24T16:28:57.810683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-10-24T16:28:57.810716Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } 2025-10-24T16:28:57.821525Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710763 2025-10-24T16:28:57.832362Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:57.832392Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:57.832412Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:13343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 50DE7F03-3E2B-492D-9E07-3A5E06F6E9CA amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-10-24T16:28:57.833505Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-10-24T16:28:57.833519Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-10-24T16:28:57.833548Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-10-24T16:28:57.834086Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:57.834096Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:57.834103Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 
2025-10-24T16:28:57.846449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 777 RawX2: 12884904601 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:57.846475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-10-24T16:28:57.846531Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 777 RawX2: 12884904601 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:57.846549Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 777 RawX2: 12884904601 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:28:57.846567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.846575Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.846582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-10-24T16:28:57.846590Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-10-24T16:28:57.846657Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:57.847255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.847348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.847358Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-10-24T16:28:57.847375Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:57.847380Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:57.847385Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:57.847389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:57.847394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:28:57.847409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:28:57.847416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:57.847434Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:28:57.847439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710763:0 2025-10-24T16:28:57.847467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:28:57.847947Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:28:57.847977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:28:57.847990Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:57.847996Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:28:57.848439Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:57.848463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:28:57.848471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:741:2683] TestWaitNotification: OK eventTxId 104 >> TImportTests::ShouldRestoreTtlSettingsInValueSinceUnixEpochMode [GOOD] >> TImportTests::ShouldRestorePartitioningBySize [GOOD] >> TImportTests::ShouldRestorePartitioningByLoad >> TImportTests::ShouldSucceedImportTableWithUniqueIndex >> TImportTests::ChangefeedTablePrefixConflict [GOOD] >> TImportTests::ChangefeedTablePrefixConflictDiffTableDesc >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [GOOD] >> TImportTests::TopicImport [GOOD] >> TImportTests::TopicExportImport >> TRestoreTests::ZeroLengthEncryptedFileTreatedAsCorrupted [GOOD] >> TRestoreWithRebootsTests::CancelShouldSucceed[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-10-24T16:28:45.854421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2025-10-24T16:28:45.891107Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:45.891188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:45.891199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002774/r3tmp/tmp0qYaQM/pdisk_1.dat 2025-10-24T16:28:45.969755Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:45.970842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:45.970881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:45.971010Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323325405443 != 1761323325405446 2025-10-24T16:28:46.002117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:46.050856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:46.095987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:46.172341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:46.187709Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:46.187951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:46.188042Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:28:46.188100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:28:46.197321Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:46.197503Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:28:46.197530Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:28:46.197691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:28:46.197700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:28:46.197707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:28:46.197759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:28:46.197818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:28:46.197831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:28:46.209136Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:28:46.212852Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:28:46.212935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:28:46.212965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:28:46.212969Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:28:46.212972Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:28:46.212976Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:46.213051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.213058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.213152Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:28:46.213171Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:28:46.213185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:28:46.213190Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:46.213196Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:28:46.213201Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:46.213203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:46.213208Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:28:46.213212Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:28:46.213220Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:46.213224Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:46.213229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:28:46.213308Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:28:46.213313Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:28:46.213328Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:28:46.213372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:28:46.213380Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:28:46.213393Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:28:46.213401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:28:46.213404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:28:46.213409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:28:46.213412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:46.213457Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:28:46.213460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:28:46.213463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:28:46.213466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:28:46.213474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:28:46.213477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:28:46.213480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:28:46.213482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:28:46.213486Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:28:46.213736Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:28:46.213747Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:28:46.224120Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:28:46.224153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:46.224161Z n ... : NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.056652Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.056657Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [9:970:2778], serverId# [9:971:2779], sessionId# [0:0:0] 2025-10-24T16:28:58.056674Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553169, Sender [9:969:2777], Recipient [9:731:2598]: NKikimrTxDataShard.TEvGetInfoRequest 2025-10-24T16:28:58.056782Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [9:974:2782], Recipient [9:731:2598]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.056789Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.056793Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [9:973:2781], serverId# [9:974:2782], sessionId# [0:0:0] 2025-10-24T16:28:58.056814Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [9:972:2780], Recipient [9:731:2598]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-10-24T16:28:58.056833Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-10-24T16:28:58.056840Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-10-24T16:28:58.056845Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2555: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-10-24T16:28:58.056852Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-10-24T16:28:58.056863Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-10-24T16:28:58.056868Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-10-24T16:28:58.056873Z node 9 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:58.056877Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-10-24T16:28:58.056885Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2025-10-24T16:28:58.056891Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-10-24T16:28:58.056895Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:58.056899Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-10-24T16:28:58.056902Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-10-24T16:28:58.056914Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-10-24T16:28:58.056966Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037890 Complete read# {[9:972:2780], 1002} after executionsCount# 1 2025-10-24T16:28:58.056974Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037890 read iterator# {[9:972:2780], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:58.056984Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037890 read iterator# {[9:972:2780], 1002} finished in read 2025-10-24T16:28:58.056992Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-10-24T16:28:58.056996Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-10-24T16:28:58.057000Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-10-24T16:28:58.057004Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-10-24T16:28:58.057011Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:3] at 72075186224037890 is Executed 2025-10-24T16:28:58.057015Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-10-24T16:28:58.057019Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:3] at 72075186224037890 has finished 2025-10-24T16:28:58.057024Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-10-24T16:28:58.057036Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-10-24T16:28:58.057158Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [9:977:2785], Recipient [9:727:2595]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.057163Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.057167Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [9:976:2784], serverId# [9:977:2785], sessionId# [0:0:0] 2025-10-24T16:28:58.057176Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553169, Sender [9:975:2783], Recipient [9:727:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-10-24T16:28:58.057312Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [9:980:2788], Recipient [9:727:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.057320Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:58.057324Z node 9 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [9:979:2787], serverId# [9:980:2788], sessionId# [0:0:0] 2025-10-24T16:28:58.057348Z node 9 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [9:978:2786], Recipient [9:727:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-10-24T16:28:58.057369Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-10-24T16:28:58.057380Z node 9 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-10-24T16:28:58.057385Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2555: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-10-24T16:28:58.057392Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-10-24T16:28:58.057403Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-10-24T16:28:58.057408Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-10-24T16:28:58.057412Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-10-24T16:28:58.057416Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2025-10-24T16:28:58.057424Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2025-10-24T16:28:58.057430Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-10-24T16:28:58.057434Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-10-24T16:28:58.057438Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-10-24T16:28:58.057442Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 
72075186224037891 on unit ExecuteRead 2025-10-24T16:28:58.057453Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-10-24T16:28:58.057472Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037891 Complete read# {[9:978:2786], 1003} after executionsCount# 1 2025-10-24T16:28:58.057478Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037891 read iterator# {[9:978:2786], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:28:58.057488Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037891 read iterator# {[9:978:2786], 1003} finished in read 2025-10-24T16:28:58.057495Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-10-24T16:28:58.057499Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-10-24T16:28:58.057503Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-10-24T16:28:58.057507Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-10-24T16:28:58.057514Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037891 is Executed 2025-10-24T16:28:58.057517Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-10-24T16:28:58.057521Z node 9 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037891 has finished 2025-10-24T16:28:58.057527Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-10-24T16:28:58.057539Z node 9 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> TRestoreTests::ShouldFailOnEmptyToken[Raw] [GOOD] >> TRestoreTests::ShouldFailOnEmptyToken[Zstd] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query >> TImportWithRebootsTests::ShouldSucceedOnTableWithChecksum >> TImportTests::TopicExportImport [GOOD] >> TImportTests::ShouldRestoreStorageSettings [GOOD] >> TImportTests::ShouldRestoreSplitPoints >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Zstd] >> TImportTests::ShouldRestorePartitioningByLoad [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile >> TImportTests::ShouldRestorePerAzReadReplicas >> TImportTests::ShouldSucceedImportTableWithUniqueIndex [GOOD] >> TImportTests::ShouldSucceedExportImportTableWithUniqueIndex >> TRestoreTests::ExportImportWithPermissionsCorruption >> TRestoreTests::ShouldFailOnEmptyToken[Zstd] [GOOD] >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningSplit [GOOD] >> TxUsage::WriteToTopic_Demo_45_Table [GOOD] >> TImportTests::ChangefeedTablePrefixConflictDiffTableDesc [GOOD] >> TImportTests::ChangefeedTablePrefixConflictDiffChangefeedDesc >> TImportTests::ChangefeedsExportRestoreUnhappyPropose 
[GOOD] >> TImportTests::ShouldSucceedWithoutTableProfiles ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::TopicExportImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:58.005146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:58.005167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.005171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:58.005177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:58.005181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:58.005185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:58.005191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.005210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:58.005311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:58.005361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:58.016295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:58.016315Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:58.018692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:58.018714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:58.018737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:58.020038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:58.020189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:58.020261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-10-24T16:28:58.020305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:58.021152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.021185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:58.021355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.021362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.021387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:58.021392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:58.021397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:58.021415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.022565Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:58.039212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:58.039272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.039311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:58.039317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:58.039394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:58.039411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:58.040006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.040036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:58.040078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.040084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:58.040088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:58.040091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:58.040579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.040598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:58.040605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:58.041012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.041023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.041030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.041037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:58.041812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:58.042338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:58.042400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:58.042623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.042658Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:58.042666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.042732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:58.042740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.042782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:58.042796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:58.044133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.044144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:28:58.845104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:28:58.845844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.845941Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.845947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-10-24T16:28:58.845975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:28:58.846004Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.846009Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-10-24T16:28:58.846013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 5 2025-10-24T16:28:58.846030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:1, at schemeshard: 
72057594046678944 2025-10-24T16:28:58.846035Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:1 ProgressState 2025-10-24T16:28:58.846047Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:1 progress is 2/2 2025-10-24T16:28:58.846051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 2/2 2025-10-24T16:28:58.846055Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:1 progress is 2/2 2025-10-24T16:28:58.846059Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 2/2 2025-10-24T16:28:58.846063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 2/2, is published: false 2025-10-24T16:28:58.846068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 2/2 2025-10-24T16:28:58.846074Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:0 2025-10-24T16:28:58.846077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710759:0 2025-10-24T16:28:58.846087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-10-24T16:28:58.846092Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:1 2025-10-24T16:28:58.846095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710759:1 2025-10-24T16:28:58.846109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-10-24T16:28:58.846113Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710759, publications: 2, subscribers: 1 2025-10-24T16:28:58.846119Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710759, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2025-10-24T16:28:58.846123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710759, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-10-24T16:28:58.846524Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-10-24T16:28:58.846539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-10-24T16:28:58.846544Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, 
txId: 281474976710759 2025-10-24T16:28:58.846549Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-10-24T16:28:58.846554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-10-24T16:28:58.846689Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-10-24T16:28:58.846701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-10-24T16:28:58.846704Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710759 2025-10-24T16:28:58.846708Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-10-24T16:28:58.846712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:28:58.846720Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710759, subscribers: 1 2025-10-24T16:28:58.846724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:140:2161] 2025-10-24T16:28:58.847946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2025-10-24T16:28:58.848111Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2025-10-24T16:28:58.848143Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-10-24T16:28:58.848156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710759 2025-10-24T16:28:58.848169Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:58.848175Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-10-24T16:28:58.849256Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:58.849299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:58.849307Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:565:2509] TestWaitNotification: OK 
eventTxId 103 2025-10-24T16:28:58.850057Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Restored/Topic_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:58.850094Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Restored/Topic_1" took 49us result status StatusSuccess 2025-10-24T16:28:58.850232Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Restored/Topic_1" PathDescription { Self { Name: "Topic_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic_1" PathId: 5 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 10 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 50000000 BurstSize: 50000000 SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } YdbDatabasePath: "/MyRoot" } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_45_Query >> TImportTests::ShouldRestorePerAzReadReplicas [GOOD] >> TImportTests::ShouldRestoreSplitPoints [GOOD] >> TImportTests::ViewCreationRetry ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldFailOnEmptyToken[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:58.006409Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:58.006427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.006431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:58.006436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:58.006440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:58.006453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:58.006459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.006480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:58.006561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:58.006606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:58.020084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:58.020105Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:58.022613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:58.022658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:58.022678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:58.024175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:58.024351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:58.024439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.024500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:58.025585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.025621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2025-10-24T16:28:58.025813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.025823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.025857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:58.025865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:58.025872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:58.025893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.027100Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:58.050573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:58.050663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.050733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:58.050743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:58.050815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:58.050832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:58.051559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.051609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:58.051673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:28:58.051683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:58.051690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:58.051695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:58.052162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:58.052574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.052599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:58.053279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:58.053747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:58.053804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:58.054019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.054046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:58.054053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.054117Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:58.054124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.054163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:58.054176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:58.054612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.054621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... { TaskId: 1 } 2025-10-24T16:28:59.027008Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:552: [Import] [s3:102] Restart: attempt# 0 2025-10-24T16:28:59.029733Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:11188 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9F8DC309-E1EA-40DF-970D-74F05FA12168 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:28:59.031131Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-10-24T16:28:59.031159Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv.zst FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-10-24T16:28:59.031407Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.031435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:28:59.031504Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.031511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11188 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FC6DB4F8-2EFA-4CAD-8DD9-684690B0CAE2 2025-10-24T16:28:59.031597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 15 2025-10-24T16:28:59.031610Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:59.031808Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 9e2cb8a1ca146d055332641ef8e7b2a6 ContentLength: 15 } } 2025-10-24T16:28:59.031937Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:59.031954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:28:59.031959Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:28:59.031965Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:28:59.031970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:28:59.031990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-10-24T16:28:59.032731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:28:59.043529Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:59.054268Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 9e2cb8a1ca146d055332641ef8e7b2a6 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:59.054291Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 9e2cb8a1ca146d055332641ef8e7b2a6 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:28:59.054310Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-14 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:11188 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 52F5E8CA-4185-441E-A762-E558E2049CF1 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-14 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 15 2025-10-24T16:28:59.055210Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9e2cb8a1ca146d055332641ef8e7b2a6 Body: 15b } 2025-10-24T16:28:59.055226Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 15, body-size# 15 2025-10-24T16:28:59.055256Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 0, error# Empty token on line: "a1",, writtenBytes# 0, writtenRows# 0 2025-10-24T16:28:59.055269Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 0, size# 8 2025-10-24T16:28:59.068115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Empty token on line: \"a1\"," BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:59.068139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:28:59.068158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Empty token on line: \"a1\"," BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:59.068184Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 
TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Empty token on line: \"a1\"," BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:59.068198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.068204Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.068209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:28:59.068215Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:28:59.068251Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.068728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.068819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.068830Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:28:59.068844Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:59.068849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:59.068854Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:28:59.068857Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:59.068862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:28:59.068878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:28:59.068885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:28:59.068891Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:28:59.068895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:28:59.068922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:28:59.069347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:28:59.069360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 >> TImportTests::ShouldSucceedWithoutTableProfiles [GOOD] >> TImportTests::ShouldWriteBillRecordOnServerlessDb >> TImportWithRebootsTests::CancelShouldSucceedOnSimpleTable >> TRestoreTests::ShouldSucceedOnMultiShardTable[Zstd] >> THealthCheckTest::TestStateStorageYellow [GOOD] >> THealthCheckTest::TestStateStorageRed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldRestoreTableWithVolatilePartitioningSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:56.818138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:56.818168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.818173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:56.818181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:56.818187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:56.818191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:56.818200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.818235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:56.818370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:56.818437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:56.831572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:56.831595Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:56.834537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:56.834568Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:56.834592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:56.836110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:56.836337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:56.836440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.836510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.837696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.837737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.837948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.837959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.837992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.838000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.838010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.838033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.839486Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.861406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:56.861480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.861532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.861539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 
2025-10-24T16:28:56.861597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.861609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.863086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.863128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.863182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.863193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.863198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.863203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.863686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.863700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.863706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.864090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.864104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.864110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.864117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.864755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.865172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 
cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.865219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.865387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.865411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.865418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.865477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:56.865484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.865518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.865529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.866057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.866065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
46678944, message: Source { RawX1: 1165 RawX2: 12884904939 } Origin: 72075186233409552 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 2 } 2025-10-24T16:28:59.083099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409552, partId: 0 2025-10-24T16:28:59.083127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1165 RawX2: 12884904939 } Origin: 72075186233409552 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 2 } 2025-10-24T16:28:59.083143Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 1165 RawX2: 12884904939 } Origin: 72075186233409552 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 2 } 2025-10-24T16:28:59.083158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.083164Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.083169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-10-24T16:28:59.083176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409553, at schemeshard: 72057594046678944 2025-10-24T16:28:59.083181Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-10-24T16:28:59.083227Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.085757Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.085797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.085803Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-10-24T16:28:59.085817Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 
progress is 1/1 2025-10-24T16:28:59.085820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:59.085823Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:59.085825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:59.085829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:28:59.085841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:28:59.085845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:59.085849Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:28:59.085852Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710763:0 2025-10-24T16:28:59.085886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:28:59.086433Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:28:59.086445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:28:59.086455Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:59.086460Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:28:59.086878Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:59.086901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-10-24T16:28:59.086908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1090:2988] TestWaitNotification: OK eventTxId 105 2025-10-24T16:28:59.087111Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Restored" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.087174Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Restored" took 73us result status StatusSuccess 2025-10-24T16:28:59.087380Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Restored" PathDescription { Self { Name: "Restored" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710762 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Restored" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 2 MaxPartitionsCount: 2 SplitByLoadSettings { Enabled: false } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 3 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\000\003\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ChangefeedsExportRestoreUnhappyPropose [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:56.482138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:56.482165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.482170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:56.482179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:56.482186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:56.482191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:56.482201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:56.482231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:56.482363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:56.482427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:56.498461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:56.498482Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:56.501808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:56.501841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:56.501869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:56.503420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:56.503681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:56.503776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.503841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:56.504963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.505005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:56.505234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.505246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:56.505284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:56.505293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:56.505300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:56.505326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.506813Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:56.522356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:56.522425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.522474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:56.522480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:56.522532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:56.522542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:56.523122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.523161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:56.523215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.523223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:56.523226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:56.523230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:56.523578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.523587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:56.523593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:56.523921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.523929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:56.523933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.523938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:56.524384Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:56.524709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:56.524755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:56.524905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:56.524923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:56.524928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.524978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:56.524983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:56.525013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:56.525022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:56.525405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:56.525410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
le NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: d41d8cd98f00b204e9800998ecf8427e ContentLength: 0 } } 2025-10-24T16:28:59.144337Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:281474976710763] HeadObject: key# /data_00.csv.sha256 REQUEST: HEAD /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:15400 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E48B4306-C34E-4907-974F-214AF113E2C0 amz-sdk-request: attempt=1 content-type: application/xml2025-10-24T16:28:59.144730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 76 2025-10-24T16:28:59.144887Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:737: [Import] [s3:281474976710763] HandleChecksum NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: b1e4e03f030176e093c2773f30899223 ContentLength: 76 } } 2025-10-24T16:28:59.144899Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710763] GetObject: key# /data_00.csv.sha256, range# 0-75 REQUEST: GET /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:15400 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5C4D5B7E-44FA-4050-9969-0C674E34DF80 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-75 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 76 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-10-24T16:28:59.145492Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:751: [Import] [s3:281474976710763] HandleChecksum NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: b1e4e03f030176e093c2773f30899223 Body: 76b } 2025-10-24T16:28:59.145569Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:28:59.156479Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: d41d8cd98f00b204e9800998ecf8427e ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Num: 0 MdLen: 32 } DownloadState: } } 2025-10-24T16:28:59.156518Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: d41d8cd98f00b204e9800998ecf8427e ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: Sha256State { H: 1779033703 H: 3144134277 H: 1013904242 H: 2773480762 H: 1359893119 H: 2600822924 H: 528734635 H: 1541459225 Nh: 0 Nl: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 0 Data: 
0 Data: 0 Data: 0 Data: 0 Num: 0 MdLen: 32 } DownloadState: } 2025-10-24T16:28:59.156536Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 0, writtenRows# 0 2025-10-24T16:28:59.176071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1516 RawX2: 12884905165 } Origin: 72075186233409554 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:59.176101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409554, partId: 0 2025-10-24T16:28:59.176135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1516 RawX2: 12884905165 } Origin: 72075186233409554 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:59.176152Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 1516 RawX2: 12884905165 } Origin: 72075186233409554 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:28:59.176171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:9, shard: 72075186233409554, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.176177Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.176183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409554, at schemeshard: 72057594046678944 2025-10-24T16:28:59.176193Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-10-24T16:28:59.176243Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.180121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.180268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.180284Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 
281474976710763:0 ProgressState 2025-10-24T16:28:59.180311Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:59.180317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:59.180324Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:28:59.180327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:59.180334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:28:59.180354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:28:59.180364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:28:59.180371Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:28:59.180376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710763:0 2025-10-24T16:28:59.180410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-10-24T16:28:59.184042Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:28:59.184076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:28:59.184093Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:59.184100Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:28:59.184146Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:640: TImport::TTxProgress: Allocate txId: info# { Id: 106 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' } 2025-10-24T16:28:59.188335Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:59.188386Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72057594046678944 2025-10-24T16:28:59.188400Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:59.188406Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1171: TImport::TTxProgress: OnAllocateResult: txId# 281474976710764, id# 106 2025-10-24T16:28:59.188426Z node 3 :IMPORT INFO: schemeshard_import__create.cpp:604: TImport::TTxProgress: CreateChangefeed propose: info# { Id: 106 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty 
maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' }, txId# 281474976710764 2025-10-24T16:28:59.188476Z node 3 :IMPORT NOTICE: schemeshard_import__create.cpp:764: TImport::TTxProgress: creation changefeed failed, cancelling, info# { Id: 106 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Restored' DstPathId: [OwnerId: 72057594046678944, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 SrcPath: SrcPrefix: Issue: 'Invalid changefeed format' } 2025-10-24T16:28:59.189014Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:59.189044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-10-24T16:28:59.189052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1450:3226] TestWaitNotification: OK eventTxId 106 >> TRestoreTests::ExportImportWithPermissionsCorruption [GOOD] >> TRestoreTests::ExportImportWithSchemeChecksumCorruption >> TRestoreTests::ExportImportWithDataCorruption[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreSplitPoints [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:58.006177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:58.006203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.006209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:58.006215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:58.006221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:58.006225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:58.006235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.006259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-10-24T16:28:58.006370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:58.006431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:58.022374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:58.022395Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:58.025155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:58.025184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:58.025207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:58.026813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:58.026995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:58.027096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.027168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:58.028158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.028190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:58.028408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.028430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.028468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:58.028474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:58.028479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:58.028498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.029771Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:58.048797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:58.048867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.048914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:58.048921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:58.048999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:58.049014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:58.049677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.049719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:58.049774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.049782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:58.049787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:58.049792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:58.050322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.050337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:58.050344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:58.051011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.051021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.051025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.051030Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:58.051533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:58.051998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:58.052046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:58.052191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.052262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:58.052267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.052294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:58.052303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:58.052727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
lPathId: 2] 2025-10-24T16:28:59.204149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:59.204473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:59.204578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:59.204589Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-10-24T16:28:59.204603Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-10-24T16:28:59.204607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:59.204612Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-10-24T16:28:59.204615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:59.204619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-10-24T16:28:59.204633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:357:2336] message: TxId: 281474976720758 2025-10-24T16:28:59.204640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:59.204646Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-10-24T16:28:59.204650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976720758:0 2025-10-24T16:28:59.204675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-10-24T16:28:59.205299Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-10-24T16:28:59.205317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976720758 2025-10-24T16:28:59.205327Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:59.205333Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-10-24T16:28:59.205805Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:59.205827Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:59.205833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:507:2456] TestWaitNotification: OK eventTxId 103 2025-10-24T16:28:59.206447Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.206494Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 56us result status StatusSuccess 2025-10-24T16:28:59.206584Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.206663Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:28:59.206719Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 58us result status StatusSuccess 2025-10-24T16:28:59.206861Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 
TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 2 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "b" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\001\000\000\000b" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { 
CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestorePerAzReadReplicas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:58.086615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:58.086635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.086639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:58.086645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:58.086649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:58.086652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:58.086659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.086680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:58.086804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:58.086868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:58.100543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:58.100565Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:58.103067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:58.103099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:58.103127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:58.104878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:58.105097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:58.105200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.105278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:58.106393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.106450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:58.106671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.106681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.106719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:58.106727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:58.106733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:58.106757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.108239Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:58.126671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:58.126752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.126804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:58.126812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:58.126872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:58.126887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:58.127632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.127677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:58.127740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.127750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:58.127756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:58.127761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:58.128179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.128191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:58.128195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:58.128519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.128528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.128535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.128541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:58.129067Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:58.129486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:58.129527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:58.129709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.129736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:58.129744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.129810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:58.129819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.129858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:58.129872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:58.130291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.130300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
n.h:116: Unable to make a bill: kind# TRestore, opId# 281474976720758:0, reason# domain is not a serverless db, domain# /MyRoot/User, domainPathId# [OwnerId: 72075186233409546, LocalPathId: 1], IsDomainSchemeShard: 0, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 2], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:28:59.243127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:59.243254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720758:0, at schemeshard: 72075186233409546 2025-10-24T16:28:59.243267Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976720758:0 ProgressState 2025-10-24T16:28:59.243288Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-10-24T16:28:59.243293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:59.243299Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720758:0 progress is 1/1 2025-10-24T16:28:59.243303Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:59.243308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720758, ready parts: 1/1, is published: true 2025-10-24T16:28:59.243326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:357:2336] message: TxId: 281474976720758 2025-10-24T16:28:59.243335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720758 ready parts: 1/1 2025-10-24T16:28:59.243342Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720758:0 2025-10-24T16:28:59.243347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976720758:0 2025-10-24T16:28:59.243376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-10-24T16:28:59.243980Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976720758 2025-10-24T16:28:59.244003Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976720758 2025-10-24T16:28:59.244017Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:59.244024Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720758 2025-10-24T16:28:59.244560Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:59.244586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:28:59.244594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: 
satisfy waiter [3:507:2456] TestWaitNotification: OK eventTxId 103 2025-10-24T16:28:59.245205Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.245258Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 65us result status StatusSuccess 2025-10-24T16:28:59.245352Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.245405Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-10-24T16:28:59.245452Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table" took 47us result status StatusSuccess 2025-10-24T16:28:59.245606Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720757 CreateStep: 150 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "created_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Uint32" TypeId: 2 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } FollowerGroups { FollowerCount: 1 RequireAllDataCenters: true FollowerCountPerDataCenter: true } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TImportTests::ChangefeedTablePrefixConflictDiffChangefeedDesc [GOOD] >> TImportTests::ShouldSucceedExportImportTableWithUniqueIndex [GOOD] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Raw] >> TImportTests::ShouldWriteBillRecordOnServerlessDb [GOOD] >> TImportTests::TablePermissions >> TRestoreTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesStandardBatch >> TImportTests::ShouldFailOnAbsentData >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ChangefeedTablePrefixConflictDiffChangefeedDesc [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:57.880953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:57.880982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:57.880988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:57.880995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:57.881002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:57.881007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, 
limit 10000 2025-10-24T16:28:57.881016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:57.881044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:57.881183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:57.881249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:57.896460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:57.896488Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:57.899798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:57.899836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:57.899885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:57.901412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:57.901656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:57.901761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.901847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:57.902880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:57.902926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:57.903181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:57.903194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:57.903232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:57.903241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:57.903248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:57.903278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.904766Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:57.926703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:57.926776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.926826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:57.926833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:57.926899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:57.926913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:57.928167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.928219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:57.928275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.928286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:57.928290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:57.928296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:57.928777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.928793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:57.928801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:57.929222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-10-24T16:28:57.929233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.929238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:57.929245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:57.929917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:57.930305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:57.930344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:57.930504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.930524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:57.930530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:57.930591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:57.930598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:57.930627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:57.930636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:57.931063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:57.931072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
eamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 2 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "updates_feed1" TopicPath: "/MyRoot/table/updates_feed1/streamImpl" YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 3 ScaleThresholdSeconds: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "my_consumer" } } Partitions { PartitionId: 0 TabletId: 72075186233409547 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409547 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 2 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.747880Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table_prefix" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.747908Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/table_prefix" took 30us result status StatusSuccess 2025-10-24T16:28:59.747983Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table_prefix" PathDescription { Self { Name: "table_prefix" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710761 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table_prefix" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "updates_feed1" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.748036Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table_prefix/updates_feed1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.748057Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/table_prefix/updates_feed1" took 22us result status StatusSuccess 2025-10-24T16:28:59.748108Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table_prefix/updates_feed1" PathDescription { Self { Name: "updates_feed1" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 281474976710763 CreateStep: 5000008 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710763 CreateStep: 5000008 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } CdcStreamDescription { Name: "updates_feed1" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.748156Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table_prefix/updates_feed1/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.748171Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/table_prefix/updates_feed1/streamImpl" took 17us result status StatusSuccess 2025-10-24T16:28:59.748239Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table_prefix/updates_feed1/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710763 CreateStep: 5000008 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "streamImpl" PathId: 7 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "updates_feed1" TopicPath: "/MyRoot/table_prefix/updates_feed1/streamImpl" YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 4 
ScaleThresholdSeconds: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "my_consumer" } } Partitions { PartitionId: 0 TabletId: 72075186233409550 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409551 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TImportTests::ViewCreationRetry [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnIndexedTable >> TRestoreTests::ExportImportWithSchemeChecksumCorruption [GOOD] >> TRestoreTests::ExportImportWithSchemeChecksumAbsence >> TImportTests::TablePermissions [GOOD] >> TRestoreTests::ExportImportWithDataCorruption[Raw] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumCorruption[Raw] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Raw] [GOOD] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Zstd] >> TRestoreTests::ShouldSucceedOnMultipleFramesStandardBatch [GOOD] >> TRestoreTests::ShouldSucceedOnMultipleFramesSmallBatch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldSucceedExportImportTableWithUniqueIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:58.006360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:58.006384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.006390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:58.006397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:58.006404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:58.006408Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:58.006417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:58.006441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:58.006548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:58.006604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:58.019059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:58.019081Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:58.021465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:58.021488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:58.021507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:58.022911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:58.023184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:58.023298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.023367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:58.024666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.024709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:58.024945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.024955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:58.024991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:58.025000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:58.025010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:58.025035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.026558Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:58.050996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:58.051068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.051117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:58.051124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:58.051182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:58.051195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:58.051939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.051986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:58.052038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:58.052053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:58.052058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:58.052519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:58.052537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:58.052987Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.053002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:58.053007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.053013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:58.053650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:58.054077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:58.054108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:58.054285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:58.054310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:58.054316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.054370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:58.054378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:58.054409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:58.054419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:58.054870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:58.054880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
tiateTxDone: 1, SnapshotStepId: 5000012, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:28:59.808795Z node 3 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-10-24T16:28:59.809065Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710764 Done 2025-10-24T16:28:59.809084Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976710764 Done TBuildInfo{ IndexBuildId: 281474976710764, Uid: 104-0-0, DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 7], IndexType: EIndexTypeGlobalUnique, IndexName: ByValue, IndexColumn: value, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:140:2161], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710765, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710766, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000012, ApplyTxId: 281474976710768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:28:59.809090Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976710764, subscribers count# 1 2025-10-24T16:28:59.809106Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710764 2025-10-24T16:28:59.809110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710764 2025-10-24T16:28:59.809116Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:28:59.809119Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710764 2025-10-24T16:28:59.809578Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:28:59.809601Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:28:59.809608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:859:2798] TestWaitNotification: OK eventTxId 104 2025-10-24T16:28:59.809732Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.809779Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 55us result status StatusSuccess 2025-10-24T16:28:59.809909Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710762 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "ByValue" LocalPathId: 8 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.809996Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:28:59.810026Z node 3 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue" took 34us result status StatusSuccess 2025-10-24T16:28:59.810176Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue" PathDescription { Self { Name: "ByValue" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710766 CreateStep: 5000012 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710766 CreateStep: 5000012 ParentPathId: 8 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ByValue" LocalPathId: 8 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TImportTests::ShouldFailOnAbsentData [GOOD] >> TImportTests::ShouldCheckQuotas ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:133:2058] recipient: [1:115:2145] 2025-10-24T16:28:59.316808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:59.316832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.316838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:59.316846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:59.316853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:59.316858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:59.316867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.316896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-10-24T16:28:59.317027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:59.317095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:59.335361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:28:59.335393Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:59.335468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:59.337917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:59.337948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:59.337972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:59.339382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:59.339572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:59.339672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.339722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:59.340742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.340783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:59.341042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.341052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.341073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:59.341082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.341089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:59.341124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.342412Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:132:2156] sender: [1:247:2058] recipient: [1:15:2062] 2025-10-24T16:28:59.359468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:59.359549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.359601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:59.359608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:59.359664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:59.359675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:59.360366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.360415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:59.360479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.360490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:59.360495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:59.360500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:59.360923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.360934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:59.360941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:59.361275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.361284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-10-24T16:28:59.361291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.361298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:59.361797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:59.362179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:59.362225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:59.362425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.362450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.362458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.362523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:59.362529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.362570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:59.362582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:59.362990Z node 1 :FLAT_TX_SCHEMESHARD ... 
CHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-10-24T16:29:00.388760Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:29:00.388765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:29:00.388779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:29835 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B5D9BAEB-9EDA-4085-81BE-F163364057BF amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-10-24T16:29:00.389288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 REQUEST: HEAD /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:29835 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4B8D73D9-732C-4C6E-BA74-F4EA3ACD9E02 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 64 REQUEST: GET /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:29835 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5E0DC550-73C8-43E2-9B33-CC7FF97AC3F3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-63 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.sha256 / 64 FAKE_COORDINATOR: Erasing txId 281474976710758 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:29835 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 975EF8B2-0C5E-4CAB-949F-068171B1C39F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-10-24T16:29:00.403501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 12884904200 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:00.403521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710758, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:00.403541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at 
schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 12884904200 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:00.403554Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710758:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 340 RawX2: 12884904200 } Origin: 72075186233409546 State: 2 TxId: 281474976710758 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:00.403568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710758:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.403575Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.403579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710758:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:00.403585Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710758:0 129 -> 240 2025-10-24T16:29:00.403622Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710758:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:00.404169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.404257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.404267Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2025-10-24T16:29:00.404283Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:29:00.404288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:29:00.404293Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710758:0 progress is 1/1 2025-10-24T16:29:00.404297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:29:00.404302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2025-10-24T16:29:00.404318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify 
send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710758 2025-10-24T16:29:00.404326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-10-24T16:29:00.404333Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710758:0 2025-10-24T16:29:00.404338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710758:0 2025-10-24T16:29:00.404362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:00.404776Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-10-24T16:29:00.404793Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710758 2025-10-24T16:29:00.405212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:29:00.405228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2280] TestWaitNotification: OK eventTxId 101 2025-10-24T16:29:00.405330Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:00.405385Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 61us result status StatusSuccess 2025-10-24T16:29:00.405528Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "eve" ACL: "\n\016\010\001\020\211\004\032\005alice \003\n\017\010\001\020\366\213\001\032\005alice \003\n\014\010\001\020\211\004\032\003bob \003" EffectiveACL: "\n\016\010\001\020\211\004\032\005alice \003\n\017\010\001\020\366\213\001\032\005alice \003\n\014\010\001\020\211\004\032\003bob \003" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 
0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TImportTests::ShouldRestoreMinMaxPartitionsCount >> TRestoreTests::ShouldFailOnFileWithoutNewLinesStandardBatch[Zstd] [GOOD] >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Zstd] >> TRestoreTests::ShouldExpandBuffer[Raw] >> TRestoreTests::ShouldNotDecompressEntirePortionAtOnce >> TRestoreTests::ShouldSucceedOnMultipleFramesSmallBatch [GOOD] >> TImportTests::ShouldCheckQuotas [GOOD] >> TRestoreTests::ExportImportWithSchemeChecksumAbsence [GOOD] >> TImportTests::ShouldBlockSplit >> TRestoreTests::ExportImportWithDataCorruption[Zstd] >> TRestoreTests::ExportImportWithDataChecksumCorruption[Raw] [GOOD] >> TRestoreTests::ExportImportWithDataChecksumCorruption[Zstd] >> TImportWithRebootsTests::ShouldSucceedOnSimpleTable >> TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Zstd] [GOOD] >> TRestoreTests::ShouldNotDecompressEntirePortionAtOnce [GOOD] >> TRestoreTests::ShouldRestoreDefaultValuesFromLiteral >> TImportTests::ShouldFailOnFileWithoutNewLines >> TImportTests::ShouldRestoreMinMaxPartitionsCount [GOOD] >> TImportTests::ShouldRestoreKeyBloomFilter >> TRestoreTests::ShouldExpandBuffer[Raw] [GOOD] >> TRestoreTests::ShouldExpandBuffer[Zstd] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnMultipleFramesSmallBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:59.738264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:59.738297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.738303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:59.738313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:59.738322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:59.738326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:59.738338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.738366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:59.738508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:59.738581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:59.753874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:59.753897Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:59.757239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:59.757277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:59.757313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:59.759020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:59.759232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:59.759354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.759466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:59.760881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.760936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:59.761189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.761199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.761234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:59.761242Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.761248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:59.761277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.762615Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:59.781563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:59.781630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.781679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:59.781685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:59.781744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:59.781759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:59.782381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.782426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:59.782486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.782495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:59.782500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:59.782505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:59.782926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.782937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:59.782943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:59.783303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.783314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.783318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.783322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:59.783830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:59.784314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:59.784352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:59.784541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.784575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.784582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.784652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:59.784659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.784694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:59.784708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:59.785176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.785186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 81 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:00.857133Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 28-34 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5669 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A5AD1935-C9B6-4466-9FED-ABA6D78D39B3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=28-34 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:29:00.857541Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-10-24T16:29:00.857555Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 60, body-size# 7 2025-10-24T16:29:00.857565Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 35-41 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5669 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0C09CB1-09FE-425F-93AE-18607A269E95 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=35-41 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:29:00.858024Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-10-24T16:29:00.858038Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 60, body-size# 7 2025-10-24T16:29:00.858065Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-10-24T16:29:00.858527Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:00.858537Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:00.858545Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 42-48 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5669 Accept: 
*/* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C65DB777-CF06-48FB-9FF8-01DB669D6D7D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=42-48 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:29:00.858949Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-10-24T16:29:00.858954Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 7 2025-10-24T16:29:00.858960Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 49-55 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5669 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1F1463CE-9C7B-48D5-9C94-FB9A87F51005 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=49-55 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:29:00.860463Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 7b } 2025-10-24T16:29:00.860488Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 7 2025-10-24T16:29:00.860499Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 56-59 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5669 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CB313A85-0EF5-4170-88D2-DE26A5FB0736 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=56-59 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 60 2025-10-24T16:29:00.861064Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: e0a029185b0e1ad2f41736bc4b274b81 Body: 4b } 2025-10-24T16:29:00.861076Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 37, content-length# 60, body-size# 4 2025-10-24T16:29:00.861100Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-10-24T16:29:00.861608Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:00.861623Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: e0a029185b0e1ad2f41736bc4b274b81 ProcessedBytes: 60 WrittenBytes: 24 WrittenRows: 3 ProcessedChecksumState: DownloadState: } 
2025-10-24T16:29:00.861630Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 24, writtenRows# 3 2025-10-24T16:29:00.873377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-10-24T16:29:00.873400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:00.873424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-10-24T16:29:00.873438Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 24 RowsProcessed: 3 } 2025-10-24T16:29:00.873451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.873456Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.873461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:00.873467Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:29:00.873500Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:00.874081Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.874134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.874142Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:29:00.874158Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:00.874162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:00.874167Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:00.874170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:00.874175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:00.874190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:29:00.874198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:00.874203Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:00.874207Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:00.874241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:00.874865Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:00.874881Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithSchemeChecksumAbsence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:59.183743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:59.183770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.183776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:59.183784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:59.183790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:59.183794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:59.183803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.183846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:59.183982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:59.184052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:59.196342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:59.196360Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:59.198760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:59.198788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:59.198810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:59.203105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:59.203281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:59.203380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.203454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:59.204296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.204336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:59.204559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.204572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.204605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:59.204613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.204620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:59.204642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.205773Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: 
[1:15:2062] 2025-10-24T16:28:59.224640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:59.224717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.224780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:59.224786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:59.224839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:59.224852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:59.225420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.225452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:59.225501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.225507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:59.225511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:59.225515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:59.225828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.225835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:59.225838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:59.226145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.226153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.226156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.226161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:59.226631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:59.227024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:59.227050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:59.227194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.227215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.227220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.227268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:59.227273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.227301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:59.227310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:59.227693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.227700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-10-24T16:29:00.901236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.901242Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:00.901411Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:29:00.901425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:29:00.901430Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-10-24T16:29:00.901435Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 REQUEST: HEAD /data_00.csv HTTP/1.1 2025-10-24T16:29:00.901440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 HEADERS: Host: localhost:18914 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D93B999F-470F-4C80-8754-3B041666F77A amz-sdk-request: attempt=1 content-type: application/xml user-agent: 2025-10-24T16:29:00.901468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-10-24T16:29:00.901657Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } 2025-10-24T16:29:00.902301Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-10-24T16:29:00.923209Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710763 2025-10-24T16:29:00.933973Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 
2025-10-24T16:29:00.933994Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:00.934006Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:18914 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C223BA53-7677-4DFF-A5A6-7B7ABB787C36 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-10-24T16:29:00.934953Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-10-24T16:29:00.934965Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-10-24T16:29:00.934998Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-10-24T16:29:00.935536Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:00.935554Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:00.935561Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-10-24T16:29:00.947894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 772 RawX2: 12884904599 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:00.947915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-10-24T16:29:00.947939Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 772 RawX2: 12884904599 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:00.947953Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 772 RawX2: 12884904599 } Origin: 
72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:00.947970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.947975Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.947981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-10-24T16:29:00.947987Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-10-24T16:29:00.948034Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:00.948487Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.948557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.948565Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-10-24T16:29:00.948578Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:29:00.948582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:29:00.948588Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:29:00.948592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:29:00.948597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:29:00.948608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:29:00.948616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:29:00.948622Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:29:00.948626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: 
RemoveTx for txid 281474976710763:0 2025-10-24T16:29:00.948650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:29:00.949027Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:29:00.949042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:29:00.949051Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:00.949056Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:29:00.949486Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:00.949520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:29:00.949526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:739:2683] TestWaitNotification: OK eventTxId 104 >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] >> TImportTests::ShouldBlockSplit [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query >> TRestoreTests::ExportImportWithDataCorruption[Zstd] [GOOD] >> TRestoreTests::ExportImportWithMetadataChecksumCorruption >> TImportTests::ShouldFailOnInvalidSchema >> TRestoreTests::ExportImportWithDataChecksumCorruption[Zstd] [GOOD] >> TImportTests::ShouldRestoreKeyBloomFilter [GOOD] >> TImportTests::ShouldRestoreIndexTableUniformPartitionsCount ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] Test command err: 2025-10-24T16:28:46.219018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:46.258272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:46.258373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:46.258385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/00274b/r3tmp/tmpgw6i1F/pdisk_1.dat 2025-10-24T16:28:46.358316Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:46.359497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:46.359543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:46.359693Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1761323325775135 != 1761323325775138 2025-10-24T16:28:46.399260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:46.450201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:46.496768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:46.572512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:46.588439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:46.588681Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:46.588749Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:675:2566] 2025-10-24T16:28:46.588818Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:28:46.598897Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:675:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:46.599121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:28:46.599154Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-10-24T16:28:46.599409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-10-24T16:28:46.599423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-10-24T16:28:46.599431Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-10-24T16:28:46.599527Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:28:46.599567Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:28:46.599584Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-10-24T16:28:46.610168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:28:46.615649Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-10-24T16:28:46.615750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:28:46.615785Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-10-24T16:28:46.615792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-10-24T16:28:46.615798Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-10-24T16:28:46.615804Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-10-24T16:28:46.615908Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:675:2566], Recipient [1:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.615919Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:28:46.616033Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-10-24T16:28:46.616054Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-10-24T16:28:46.616073Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-10-24T16:28:46.616085Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:28:46.616095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-10-24T16:28:46.616102Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-10-24T16:28:46.616108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-10-24T16:28:46.616114Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-10-24T16:28:46.616121Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-10-24T16:28:46.616133Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:677:2567], Recipient [1:675:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:46.616139Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:46.616147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:672:2564], serverId# [1:677:2567], sessionId# [0:0:0] 2025-10-24T16:28:46.616248Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:677:2567] 2025-10-24T16:28:46.616256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:28:46.616279Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-10-24T16:28:46.616340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-10-24T16:28:46.616354Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-10-24T16:28:46.616374Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-10-24T16:28:46.616383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-10-24T16:28:46.616388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-10-24T16:28:46.616395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-10-24T16:28:46.616400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:46.616514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-10-24T16:28:46.616520Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-10-24T16:28:46.616525Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-10-24T16:28:46.616529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-10-24T16:28:46.616545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-10-24T16:28:46.616549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-10-24T16:28:46.616555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-10-24T16:28:46.616558Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-10-24T16:28:46.616564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-10-24T16:28:46.616844Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:675:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-10-24T16:28:46.616853Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-10-24T16:28:46.627186Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-10-24T16:28:46.627227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-10-24T16:28:46.627236Z n ... tConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-10-24T16:29:01.415407Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:01.415412Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2025-10-24T16:29:01.415447Z node 11 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-10-24T16:29:01.415454Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [11:707:2583], Recipient [11:711:2585]: {TEvReadSet step# 2000 txid# 1234567890011 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2025-10-24T16:29:01.415459Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:01.415462Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2025-10-24T16:29:01.446417Z node 11 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k8bgtnk80e4f6vymxeessnkm, Database: , SessionId: ydb://session/3?node_id=11&id=MWMyNjcyNC00ZGQ5ODFiMC04YjRiNmUxNy04MTcyZmEzNA==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-10-24T16:29:01.447287Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [11:971:2772], Recipient [11:707:2583]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-10-24T16:29:01.447344Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-10-24T16:29:01.447365Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-10-24T16:29:01.447390Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-10-24T16:29:01.447396Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-10-24T16:29:01.447404Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-10-24T16:29:01.447409Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-10-24T16:29:01.447439Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-10-24T16:29:01.447446Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-10-24T16:29:01.447450Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-10-24T16:29:01.447456Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-10-24T16:29:01.447460Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-10-24T16:29:01.447481Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-10-24T16:29:01.447544Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:29:01.447553Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037888 Complete read# {[11:971:2772], 0} after executionsCount# 1 2025-10-24T16:29:01.447563Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037888 read iterator# {[11:971:2772], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:29:01.447588Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037888 read iterator# {[11:971:2772], 0} finished in read 2025-10-24T16:29:01.447602Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-10-24T16:29:01.447606Z 
node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-10-24T16:29:01.447611Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-10-24T16:29:01.447615Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-10-24T16:29:01.447630Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-10-24T16:29:01.447634Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-10-24T16:29:01.447639Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037888 has finished 2025-10-24T16:29:01.447645Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-10-24T16:29:01.447668Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-10-24T16:29:01.447870Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [11:971:2772], Recipient [11:707:2583]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-10-24T16:29:01.447882Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-10-24T16:29:01.448006Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [11:971:2772], Recipient [11:711:2585]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-10-24T16:29:01.448031Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2452: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-10-24T16:29:01.448042Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-10-24T16:29:01.448052Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-10-24T16:29:01.448056Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-10-24T16:29:01.448060Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-10-24T16:29:01.448064Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-10-24T16:29:01.448072Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-10-24T16:29:01.448077Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-10-24T16:29:01.448081Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-10-24T16:29:01.448084Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] 
at 72075186224037889 to execution unit ExecuteRead 2025-10-24T16:29:01.448090Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-10-24T16:29:01.448102Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1578: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-10-24T16:29:01.448127Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-10-24T16:29:01.448133Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2163: 72075186224037889 Complete read# {[11:971:2772], 1} after executionsCount# 1 2025-10-24T16:29:01.448138Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2137: 72075186224037889 read iterator# {[11:971:2772], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-10-24T16:29:01.448146Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2188: 72075186224037889 read iterator# {[11:971:2772], 1} finished in read 2025-10-24T16:29:01.448152Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-10-24T16:29:01.448156Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-10-24T16:29:01.448159Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-10-24T16:29:01.448163Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-10-24T16:29:01.448169Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-10-24T16:29:01.448172Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-10-24T16:29:01.448176Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037889 has finished 2025-10-24T16:29:01.448179Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2687: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-10-24T16:29:01.448190Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2736: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-10-24T16:29:01.448292Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [11:971:2772], Recipient [11:711:2585]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-10-24T16:29:01.448299Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3409: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1003 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1004 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> 
TRestoreTests::ShouldFailOnFileWithoutNewLinesSmallBatch[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:00.171436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:00.171467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:00.171473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:00.171481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:00.171488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:00.171492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:00.171502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:00.171538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:00.171674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:00.171745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:00.186312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:00.186339Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:00.190350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:00.190394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:00.190424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:00.192026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:00.192301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:00.192423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.192519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:00.194050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:00.194100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:00.194331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:00.194344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:00.194375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:00.194383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:00.194390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:00.194419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.195816Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:00.214567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:00.214645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.214702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:00.214709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:00.214773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:00.214787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:00.219871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-10-24T16:29:00.219925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:00.219982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.219991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:00.219994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:00.219997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:00.220764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.220775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:00.220779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:00.221170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.221183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.221187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:00.221192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:00.221766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:00.222429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:00.222459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:00.222603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.222631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { 
RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:00.222640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:00.222701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:00.222709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:00.222749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:00.222759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:00.223153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:00.223160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... tpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.254162Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.254184Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.254200Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 16-16 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6D282E14-A6D0-4E9B-A22B-EE6122CB9885 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=16-16 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.254879Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.254913Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.254932Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 17-17 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 363C2B3E-EC91-4C4D-95A8-94BF4E1C4300 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=17-17 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.255567Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] 
Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.255579Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.255588Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 18-18 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 55EBBC1B-8E46-40FF-9D22-24650537F656 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=18-18 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.258056Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.258073Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.258085Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 19-19 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C5A42A91-D0B5-4BFE-9366-1C062D2F0F27 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=19-19 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.258798Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.258811Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.258820Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 20-20 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 45A188D1-539D-4E44-A833-EE1CA373F6B4 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=20-20 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.259336Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.259348Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.259357Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 21-21 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12591 Accept: */* Connection: Upgrade, HTTP2-Settings 
Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 23D430B1-43A5-43E5-A5EB-E3AE1B1091AA amz-sdk-request: attempt=1 content-type: application/xml range: bytes=21-21 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 22 2025-10-24T16:29:01.259952Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 074c5adf029fca0d8b15e306cee4e962 Body: 1b } 2025-10-24T16:29:01.259963Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 22, body-size# 1 2025-10-24T16:29:01.259974Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 0, error# Cannot process data: cannot find new line symbol, writtenBytes# 0, writtenRows# 0 2025-10-24T16:29:01.262452Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: cannot find new line symbol" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:29:01.262472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:01.262502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: cannot find new line symbol" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:29:01.262519Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: false Explain: "Cannot process data: cannot find new line symbol" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:29:01.262546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.262552Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.262557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:01.262566Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:29:01.262615Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# 
[OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.263224Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.263326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.263336Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:29:01.263354Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:01.263359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:01.263365Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:01.263368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:01.263374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:01.263389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:341:2318] message: TxId: 102 2025-10-24T16:29:01.263395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:01.263399Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:01.263403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:01.263438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:01.263815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:01.263826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:400:2370] TestWaitNotification: OK eventTxId 102 >> TImportTests::ShouldFailOnFileWithoutNewLines [GOOD] >> TImportTests::ShouldFailOnEmptyToken >> TRestoreTests::ShouldExpandBuffer[Zstd] [GOOD] >> TRestoreTests::ShouldCountWrittenBytesAndRows[Zstd] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldBlockSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:00.285355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:00.285377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:00.285383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:00.285392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:00.285398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:00.285413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:00.285425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:00.285454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:00.285586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:00.285652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:00.304064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:00.304086Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:00.307126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:00.307152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:00.307188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:00.308812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:00.309082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:00.309188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.309265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:00.310543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:00.310596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:00.310834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:00.310847Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:00.310882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:00.310890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:00.310897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:00.310918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.312474Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:00.332766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:00.332837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.332890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:00.332898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:00.332963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:00.332980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:00.333639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.333685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:00.333739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.333749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-10-24T16:29:00.333754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:00.333760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:00.334164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.334177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:00.334183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:00.334622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.334634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:00.334640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:00.334646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:00.335269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:00.335642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:00.335681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:00.335813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:00.335833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:00.335838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:00.335925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:00.335932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:00.335961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:00.335970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:00.336520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:00.336528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... hemeshard__operation_split_merge.cpp:141: Initializing scheme on dst datashard: 72075186233409549 splitOp: 103:0 alterVersion: 1 at tablet: 72057594046678944 2025-10-24T16:29:01.439782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269553152 2025-10-24T16:29:01.439828Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 269553152 2025-10-24T16:29:01.439869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409548 2025-10-24T16:29:01.439875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409549 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-10-24T16:29:01.446809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:29:01.446823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:29:01.446895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:29:01.446903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-10-24T16:29:01.446909Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:29:01.469310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409548 2025-10-24T16:29:01.469334Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 103:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 103:0, at schemeshard: 72057594046678944 message# OperationCookie: 103 TabletId: 72075186233409548 2025-10-24T16:29:01.469690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.491450Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409549 2025-10-24T16:29:01.491471Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 103:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 103:0, at schemeshard: 72057594046678944 message# OperationCookie: 103 TabletId: 72075186233409549 2025-10-24T16:29:01.491481Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 3 -> 131 2025-10-24T16:29:01.491876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.491920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.491927Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.491933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 103:0 Starting split on src datashard 72075186233409546 splitOpId# 103:0 at tablet 72057594046678944 2025-10-24T16:29:01.492385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553154 2025-10-24T16:29:01.492419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 2025-10-24T16:29:01.530342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409546 2025-10-24T16:29:01.530370Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 103:0 HandleReply TEvSplitAck, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409546 2025-10-24T16:29:01.530503Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 131 -> 132 2025-10-24T16:29:01.530544Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:29:01.531130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.531196Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.531208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:29:01.531290Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.531297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-10-24T16:29:01.531397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.531407Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.531427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186233409546 on partitioning changed splitOp# 103 at tablet 72057594046678944 2025-10-24T16:29:01.531580Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:29:01.531595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-10-24T16:29:01.531601Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-10-24T16:29:01.531607Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-10-24T16:29:01.531613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-10-24T16:29:01.531628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-10-24T16:29:01.538290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269553158 2025-10-24T16:29:01.538977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-10-24T16:29:01.539547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: OperationCookie: 103 TabletId: 72075186233409546 2025-10-24T16:29:01.539567Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 103:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:01.539592Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:29:01.539598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:29:01.539605Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:29:01.539612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:29:01.539618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-10-24T16:29:01.539630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:461:2409] message: TxId: 103 2025-10-24T16:29:01.539643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:29:01.539649Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:29:01.539654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:29:01.539698Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-10-24T16:29:01.542218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.542231Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 103:0 2025-10-24T16:29:01.542283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:29:01.542292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:623:2547] TestWaitNotification: OK eventTxId 103 >> TImportTests::ShouldFailOnInvalidSchema [GOOD] >> TImportTests::ShouldFailOnInvalidValue >> TRestoreTests::ShouldSucceedOnMultiShardTable[Raw] >> KqpLimits::QSReplySize-useSink [GOOD] >> TRestoreTests::ShouldRestoreDefaultValuesFromLiteral [GOOD] >> TRestoreTests::ShouldHandleOverloadedShard |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |62.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithDataChecksumCorruption[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:59.939374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:59.939394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.939398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:59.939404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:59.939408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:59.939411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:59.939428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.939452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:59.939546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:59.939598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:59.949996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:59.950014Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:59.952886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:59.952920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:59.952944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:59.954321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:59.954542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:59.954638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.954742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:59.955751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.955805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:59.956037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.956047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.956083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:59.956090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.956097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:59.956119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.957434Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:59.975115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:59.975179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.975226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:59.975233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:59.975303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:59.975316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:59.976027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.976069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:59.976128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.976135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:59.976139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:59.976142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:59.976471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.976479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:59.976485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:59.976754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.976760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.976766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.976772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:59.977234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:59.977576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:59.977618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:59.977770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.977789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:59.977796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.977859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:59.977867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.977896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:59.977905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:59.978339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.978345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-10-24T16:29:01.738112Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-10-24T16:29:01.738117Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:1059 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade2025-10-24T16:29:01.738134Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 : 2025-10-24T16:29:01.738167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E105BCA9-04B6-4A29-8D64-287F7621CF25 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:29:01.738193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2025-10-24T16:29:01.738382Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-10-24T16:29:01.738396Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:281474976710765] HeadObject: key# /data_00.csv.zst 2025-10-24T16:29:01.738986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:1059 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0D3CC452-BCE1-4511-AE18-30CD0AA88747 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 20 2025-10-24T16:29:01.739270Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f ContentLength: 20 } } 2025-10-24T16:29:01.750058Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710765 2025-10-24T16:29:01.771132Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:01.771157Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710765] Process download info at 'DownloadInfo': info# { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:01.771177Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710765] GetObject: key# /data_00.csv.zst, range# 0-19 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:1059 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D66D81CC-212A-4AA3-82C8-FBF67D064F5E amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-19 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 20 2025-10-24T16:29:01.772145Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710765] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: daa167f46d135bcc1fbc9f42f80e496f Body: 20b } 2025-10-24T16:29:01.772165Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710765] Content size: processed-bytes# 0, content-length# 20, body-size# 20 2025-10-24T16:29:01.772222Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710765] Upload rows: count# 1, size# 36 2025-10-24T16:29:01.772825Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710765] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409549 Status: 0 Info: { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 20 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 
2025-10-24T16:29:01.772841Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710765] Process download info at 'UploadResponse': info# { DataETag: daa167f46d135bcc1fbc9f42f80e496f ProcessedBytes: 20 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:01.772849Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710765] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-10-24T16:29:01.785732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 918 RawX2: 12884904730 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:01.785761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-10-24T16:29:01.785789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 918 RawX2: 12884904730 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:01.785804Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 918 RawX2: 12884904730 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:01.785819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.785826Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.785831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-10-24T16:29:01.785838Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710765:0 129 -> 240 2025-10-24T16:29:01.785880Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.786267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.786325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.786330Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-10-24T16:29:01.786340Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-10-24T16:29:01.786343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-10-24T16:29:01.786346Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710765:0 progress is 1/1 2025-10-24T16:29:01.786349Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-10-24T16:29:01.786352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-10-24T16:29:01.786362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710765 2025-10-24T16:29:01.786367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-10-24T16:29:01.786371Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710765:0 2025-10-24T16:29:01.786374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710765:0 2025-10-24T16:29:01.786394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-10-24T16:29:01.786668Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-10-24T16:29:01.786678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710765 2025-10-24T16:29:01.786686Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:01.786690Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710765 2025-10-24T16:29:01.786960Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:01.786972Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:29:01.786977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:884:2813] TestWaitNotification: OK eventTxId 104 |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |62.4%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> TRestoreTests::ShouldCountWrittenBytesAndRows[Zstd] [GOOD] >> TImportTests::ShouldRestoreIncrementalBackupFlagNullAsFalse |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> TImportTests::ShouldFailOnInvalidValue [GOOD] >> TImportTests::ShouldFailOnNonUniqDestinationPaths >> 
YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> TRestoreTests::ExportImportWithMetadataChecksumCorruption [GOOD] >> TRestoreTests::ExportImportWithMetadataChecksumAbsence >> TImportTests::ShouldFailOnEmptyToken [GOOD] >> TImportTests::ShouldFailOnInvalidPath >> TImportTests::ShouldFailOnNonUniqDestinationPaths [GOOD] >> TRestoreTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TRestoreTests::ShouldSucceedOnLargeData[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldCountWrittenBytesAndRows[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:01.133137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:01.133161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.133166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:01.133174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:01.133181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:01.133185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:01.133194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.133220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:01.133341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:01.133398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:01.146886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:01.146906Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:01.149666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:01.149696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:01.149718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-10-24T16:29:01.151204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:01.151372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:01.151464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.151524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:01.152715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.152758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:01.152968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.152980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.153009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:01.153017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.153026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:01.153048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.154549Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:01.171219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:01.171289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.171332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:01.171338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:01.171392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:01.171403Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:01.171995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.172034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:01.172078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.172084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:01.172088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:01.172092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:01.172403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.172411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.172415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:01.172647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.172656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.172660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.172664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:01.173255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:01.173630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:01.173670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at 
step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:01.173875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.173905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:01.173913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.173995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:01.174004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.174043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:01.174056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:01.174443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.174450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
02.171130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:29:02.171188Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.171193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-10-24T16:29:02.171252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.171260Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:02.171376Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:29:02.171385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-10-24T16:29:02.171388Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-10-24T16:29:02.171392Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:29:02.171396Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:29:02.171407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:9996 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 90CED307-5E3E-4CB6-8A2C-E3028CDB885B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:29:02.171794Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. 
} 2025-10-24T16:29:02.171807Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv.zst 2025-10-24T16:29:02.172161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:9996 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6D3903A0-1681-4E49-8484-698B9FD84E88 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 37 2025-10-24T16:29:02.172653Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: fe451e85ab0310efdda31e730583289f ContentLength: 37 } } FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:29:02.173089Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:02.183901Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: fe451e85ab0310efdda31e730583289f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:02.183925Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: fe451e85ab0310efdda31e730583289f ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:02.183946Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-36 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:9996 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 559D930B-7E80-4B1E-9C8F-976EB4894D1A amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-36 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 37 2025-10-24T16:29:02.184796Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: fe451e85ab0310efdda31e730583289f Body: 37b } 2025-10-24T16:29:02.184812Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 37, body-size# 37 2025-10-24T16:29:02.184852Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 2, size# 60 2025-10-24T16:29:02.198727Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: fe451e85ab0310efdda31e730583289f ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:02.198755Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 
fe451e85ab0310efdda31e730583289f ProcessedBytes: 37 WrittenBytes: 16 WrittenRows: 2 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:02.198766Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 16, writtenRows# 2 2025-10-24T16:29:02.211164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-10-24T16:29:02.211185Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:02.211210Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-10-24T16:29:02.211226Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 16 RowsProcessed: 2 } 2025-10-24T16:29:02.211254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.211260Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.211265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:02.211272Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:29:02.211311Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:02.211805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.211843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.211868Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:29:02.211884Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:02.211888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:02.211894Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:02.211898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:02.211903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:02.211918Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:29:02.211924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:02.211931Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:02.211935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:02.211964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:02.212406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:02.212418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> TImportTests::ShouldFailOnInvalidPath [GOOD] |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |62.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> TImportTests::ShouldRestoreIncrementalBackupFlagNullAsFalse [GOOD] >> TImportTests::ShouldRestoreIndexTableSplitPoints >> TImportTests::ShouldRestoreIndexTableUniformPartitionsCount [GOOD] >> TRestoreTests::ExportImportWithMetadataChecksumAbsence [GOOD] >> TImportTests::ChangefeedWithPartitioning ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 13937, MsgBus: 19425 2025-10-24T16:28:25.084771Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825993810323506:2078];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:25.087111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:28:25.088724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/fxx3/0017ef/r3tmp/tmpdcRQVA/pdisk_1.dat 2025-10-24T16:28:25.119716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:25.119743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:25.120614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:25.128921Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13937, node 1 2025-10-24T16:28:25.159784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:25.159796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:25.159798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:25.159843Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19425 2025-10-24T16:28:25.206851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:25.232140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:25.244209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:25.271197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:25.293505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... waiting... 2025-10-24T16:28:25.309225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.688121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825993810325111:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.688186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.688402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825993810325121:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.688409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.781807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.813276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.834692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.847434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.862420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.873411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.884228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.900543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:25.921045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825993810325982:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.921066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.921142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825993810325987:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.921151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825993810325988:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.921154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:25.922086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB c ... &id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.522837Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727075. Ctx: { TraceId: 01k8bgtnpg7j9nrfagqh3j5q01, Database: , SessionId: ydb://session/3?node_id=2&id=ZTI3MWZlYTUtNTVjYjI4ZWQtMzRiZjE2OTMtYWVlMmMwOTc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.523181Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727074. Ctx: { TraceId: 01k8bgtnpg89tc9hqnf2fzjd2s, Database: , SessionId: ydb://session/3?node_id=2&id=NjAwMmE0NDUtMmI1MmFlMWMtYWE4N2M4ZDktZGZjYTk0NWY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.523315Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727076. Ctx: { TraceId: 01k8bgtnpf1237jj6vnzq9jjb2, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.524352Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727077. Ctx: { TraceId: 01k8bgtnpf970f494qmxqwvpvk, Database: , SessionId: ydb://session/3?node_id=2&id=NjBhMDU0YmYtZjk2MzI2ODItYjQyOTNmNmYtNWRmMDY3OWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.525278Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727078. Ctx: { TraceId: 01k8bgtnpg89tc9hqnf2fzjd2s, Database: , SessionId: ydb://session/3?node_id=2&id=NjAwMmE0NDUtMmI1MmFlMWMtYWE4N2M4ZDktZGZjYTk0NWY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.525418Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727079. Ctx: { TraceId: 01k8bgtnpg7j9nrfagqh3j5q01, Database: , SessionId: ydb://session/3?node_id=2&id=ZTI3MWZlYTUtNTVjYjI4ZWQtMzRiZjE2OTMtYWVlMmMwOTc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.525520Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727080. Ctx: { TraceId: 01k8bgtnpf970f494qmxqwvpvk, Database: , SessionId: ydb://session/3?node_id=2&id=NjBhMDU0YmYtZjk2MzI2ODItYjQyOTNmNmYtNWRmMDY3OWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.526564Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727081. Ctx: { TraceId: 01k8bgtnpk89m43qyt27r71ssh, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.527377Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727082. Ctx: { TraceId: 01k8bgtnpg7j9nrfagqh3j5q01, Database: , SessionId: ydb://session/3?node_id=2&id=ZTI3MWZlYTUtNTVjYjI4ZWQtMzRiZjE2OTMtYWVlMmMwOTc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.527726Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727083. 
Ctx: { TraceId: 01k8bgtnpg89tc9hqnf2fzjd2s, Database: , SessionId: ydb://session/3?node_id=2&id=NjAwMmE0NDUtMmI1MmFlMWMtYWE4N2M4ZDktZGZjYTk0NWY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.530373Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727084. Ctx: { TraceId: 01k8bgtnpk89m43qyt27r71ssh, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.530944Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727085. Ctx: { TraceId: 01k8bgtnpk89m43qyt27r71ssh, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.533345Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727086. Ctx: { TraceId: 01k8bgtnpw3ba3d9w2mhas9stk, Database: , SessionId: ydb://session/3?node_id=2&id=YmJiYzc1ODEtZTFiOWQyZDctY2VjZTRkYjMtNDRmNDU3Mg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.538654Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727087. Ctx: { TraceId: 01k8bgtnq10gsj1p44fspnywyh, Database: , SessionId: ydb://session/3?node_id=2&id=ZGVhZGJlOTctODk2MGFiOGQtNzdkYjdjMDItZDkyOGI3NDg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.539978Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727088. Ctx: { TraceId: 01k8bgtnq1cmftt0vaetfbm7ym, Database: , SessionId: ydb://session/3?node_id=2&id=YmI3ZjdhMGMtNTcyZWNkMjktZGRjYTM2YmMtNDJhYWZmOGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.540037Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727089. Ctx: { TraceId: 01k8bgtnq23d7rkj92v8jzhgkf, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.540336Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727090. Ctx: { TraceId: 01k8bgtnq14gyzvcxtpqkzxbz3, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.541298Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727091. Ctx: { TraceId: 01k8bgtnq14gyzvcxtpqkzxbz3, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.541524Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727092. Ctx: { TraceId: 01k8bgtnq1cmftt0vaetfbm7ym, Database: , SessionId: ydb://session/3?node_id=2&id=YmI3ZjdhMGMtNTcyZWNkMjktZGRjYTM2YmMtNDJhYWZmOGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.541777Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727093. Ctx: { TraceId: 01k8bgtnq23d7rkj92v8jzhgkf, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.542374Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727094. 
Ctx: { TraceId: 01k8bgtnq14gyzvcxtpqkzxbz3, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.542673Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727095. Ctx: { TraceId: 01k8bgtnq42xxxxm93n0m03ypg, Database: , SessionId: ydb://session/3?node_id=2&id=ZTI3MWZlYTUtNTVjYjI4ZWQtMzRiZjE2OTMtYWVlMmMwOTc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.543594Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727096. Ctx: { TraceId: 01k8bgtnq14gyzvcxtpqkzxbz3, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.543826Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727097. Ctx: { TraceId: 01k8bgtnq42xxxxm93n0m03ypg, Database: , SessionId: ydb://session/3?node_id=2&id=ZTI3MWZlYTUtNTVjYjI4ZWQtMzRiZjE2OTMtYWVlMmMwOTc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.551178Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727098. Ctx: { TraceId: 01k8bgtnqe2rj0vc22rtp093qx, Database: , SessionId: ydb://session/3?node_id=2&id=YmI3ZjdhMGMtNTcyZWNkMjktZGRjYTM2YmMtNDJhYWZmOGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.551214Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727099. Ctx: { TraceId: 01k8bgtnqectexd29c4etcdn7z, Database: , SessionId: ydb://session/3?node_id=2&id=NjAwMmE0NDUtMmI1MmFlMWMtYWE4N2M4ZDktZGZjYTk0NWY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-10-24T16:29:01.553377Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727100. Ctx: { TraceId: 01k8bgtnqefy7z1ktt2g8s013p, Database: , SessionId: ydb://session/3?node_id=2&id=ZGVhZGJlOTctODk2MGFiOGQtNzdkYjdjMDItZDkyOGI3NDg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.553486Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727102. Ctx: { TraceId: 01k8bgtnqg4k409zv4shdemwrn, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.553620Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727101. Ctx: { TraceId: 01k8bgtnqgfz125qk486jn9tfb, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.554547Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727103. Ctx: { TraceId: 01k8bgtnqefy7z1ktt2g8s013p, Database: , SessionId: ydb://session/3?node_id=2&id=ZGVhZGJlOTctODk2MGFiOGQtNzdkYjdjMDItZDkyOGI3NDg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.555061Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727104. Ctx: { TraceId: 01k8bgtnqgfz125qk486jn9tfb, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.555106Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727105. 
Ctx: { TraceId: 01k8bgtnqg4k409zv4shdemwrn, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.556059Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727106. Ctx: { TraceId: 01k8bgtnqgfz125qk486jn9tfb, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.556359Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727107. Ctx: { TraceId: 01k8bgtnqefy7z1ktt2g8s013p, Database: , SessionId: ydb://session/3?node_id=2&id=ZGVhZGJlOTctODk2MGFiOGQtNzdkYjdjMDItZDkyOGI3NDg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.556661Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727108. Ctx: { TraceId: 01k8bgtnqg4k409zv4shdemwrn, Database: , SessionId: ydb://session/3?node_id=2&id=ZjdkOGVmZmQtMzkyYzY3NzMtNzZkOTRiMDgtODEyNTA5YTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.557458Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727109. Ctx: { TraceId: 01k8bgtnqgfz125qk486jn9tfb, Database: , SessionId: ydb://session/3?node_id=2&id=ZDU5ZjE1OS1hY2E1YTkzOS1hZDAxYjM3My1hZWY4NTQxMg==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-10-24T16:29:01.557611Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976727110. Ctx: { TraceId: 01k8bgtnqefy7z1ktt2g8s013p, Database: , SessionId: ydb://session/3?node_id=2&id=ZGVhZGJlOTctODk2MGFiOGQtNzdkYjdjMDItZDkyOGI3NDg=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS 2025-10-24T16:29:01.708217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:29:01.708238Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded >> TRestoreTests::ShouldSucceedOnLargeData[Raw] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldFailOnNonUniqDestinationPaths [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:01.930324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:01.930352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.930358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:01.930366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:01.930373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:01.930377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:01.930387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.930419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:01.930551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:01.930616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:01.945763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:01.945784Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:01.947995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:01.948018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:01.948039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:01.949098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:01.949236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:01.949309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.949352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:01.950102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.950139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:01.950314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.950323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.950359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:01.950366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.950371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2025-10-24T16:29:01.950396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.951594Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:01.966144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:01.966202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.966240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:01.966246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:01.966291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:01.966300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:01.966790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.966824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:01.966865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.966872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:01.966876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:01.966880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:01.967235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.967244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-10-24T16:29:01.967250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:01.967687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.967705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.967712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.967719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:01.968493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:01.969032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:01.969074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:01.969323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.969355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:01.969363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.969434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:01.969445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.969495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:01.969510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:01.969963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.969974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:02.460868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.460926Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:02.460967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.460977Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:02.460982Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:02.460988Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:02.461333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.461345Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:02.461354Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:02.461682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.461691Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.461697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.461703Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.461734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:02.462038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:02.462069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:02.462241Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.462263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:02.462270Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.462334Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:02.462341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.462371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:02.462385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:02.462764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:02.462772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:02.462808Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.462817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:29:02.462867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.462874Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:29:02.462887Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:29:02.462891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.462897Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:29:02.462900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.462905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2025-10-24T16:29:02.462912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.462918Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:29:02.462923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:29:02.462934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:29:02.462940Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:29:02.462944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-10-24T16:29:02.463054Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:29:02.463066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:29:02.463072Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:29:02.463077Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:29:02.463082Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:02.463095Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:29:02.463626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:29:02.463703Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.464980Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:276:2265] Bootstrap 2025-10-24T16:29:02.465235Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:276:2265] Become StateWork (SchemeCache [3:281:2270]) 2025-10-24T16:29:02.465291Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-10-24T16:29:02.465317Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 101 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:26158" scheme: HTTP items { source_prefix: "a" destination_path: "/MyRoot/Table" } items { source_prefix: "b" destination_path: "/MyRoot/Table" } } } 2025-10-24T16:29:02.465361Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# BAD_REQUEST, 
error# Duplicate destination_path: /MyRoot/Table 2025-10-24T16:29:02.465368Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: TxId: 101 2025-10-24T16:29:02.465409Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-10-24T16:29:02.465466Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:29:02.465885Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-10-24T16:29:02.465931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:29:02.465938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:29:02.465990Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:29:02.466006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:29:02.466012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:288:2277] TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldFailOnInvalidPath [GOOD] >> TRestoreTests::ShouldSucceedOnLargeData[Zstd] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:01.623403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:01.623444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.623449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:01.623457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:01.623464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:01.623468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:01.623477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.623506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:01.623660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:01.623728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:01.635157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:01.635179Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:01.638020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:01.638052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:01.638074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:01.639648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:01.639892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:01.640003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.640085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:01.641205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.641260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:01.641558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.641571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.641619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:01.641628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.641635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:01.641664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.643199Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:01.666911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:01.667001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.667064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:01.667072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:01.667139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:01.667155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:01.667812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.667900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:01.667962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.667973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:01.667980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:01.667986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:01.668455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.668467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.668478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:01.668828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.668838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.668844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose 
ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.668853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:01.669669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:01.670119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:01.670153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:01.670351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.670376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:01.670383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.670452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:01.670459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.670494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:01.670506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:01.670954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.670962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
b/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:02.553655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.553696Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:02.553736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.553745Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:02.553751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:02.553758Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:02.554134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.554147Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:02.554153Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:02.554573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.554588Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.554595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.554602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.554633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:02.554985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:02.555018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:02.555193Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.555217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:02.555224Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.555280Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:02.555288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.555317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:02.555331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:02.555788Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:02.555796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:02.555835Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.555839Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:29:02.555905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.555913Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:29:02.555924Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:29:02.555928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.555933Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:29:02.555937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.555941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-10-24T16:29:02.555948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.555952Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:29:02.555956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:29:02.555968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:29:02.555974Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:29:02.555978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-10-24T16:29:02.556100Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:29:02.556116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:29:02.556121Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:29:02.556126Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:29:02.556131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:02.556147Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:29:02.562968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:29:02.563069Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.564197Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:276:2265] Bootstrap 2025-10-24T16:29:02.564428Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:276:2265] Become StateWork (SchemeCache [3:281:2270]) 2025-10-24T16:29:02.564493Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-10-24T16:29:02.564516Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 101 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:28974" scheme: HTTP items { source_prefix: "a" destination_path: "/InvalidRoot/Table" } } } 2025-10-24T16:29:02.564552Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# BAD_REQUEST, error# Check failed: path: '/InvalidRoot/Table', error: root not found, source_location: ydb/core/tx/schemeshard/schemeshard_import__create.cpp:75 2025-10-24T16:29:02.564558Z node 3 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: 
TxId: 101 2025-10-24T16:29:02.564590Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-10-24T16:29:02.564640Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:29:02.565190Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-10-24T16:29:02.565234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:29:02.565240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:29:02.565294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:29:02.565312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:29:02.565317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:288:2277] TestWaitNotification: OK eventTxId 101 >> TImportTests::ShouldSucceedOnSingleShardTable |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |62.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> TRestoreTests::ExportImportWithSchemeCorruption >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTableAndLimitedResources[Raw] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreIndexTableUniformPartitionsCount [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:01.102204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:01.102232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.102237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:01.102244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:01.102250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:01.102254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:01.102264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.102291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:01.102419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:01.102488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:01.117051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:01.117072Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:01.119460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:01.119485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:01.119503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:01.120808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:01.121034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:01.121132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.121194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:01.122333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.122369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:01.122565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.122577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.122607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:01.122613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.122619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:01.122640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.123907Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:01.141744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:01.141818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.141870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:01.141879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:01.141943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:01.141958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:01.142480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.142515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:01.142558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.142565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:01.142568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:01.142573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:01.143002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.143021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.143027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:01.143465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.143480Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.143486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.143493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:01.144187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:01.144660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:01.144715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:01.144891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.144915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:01.144921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.144981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:01.144988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.145025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:01.145037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:01.145486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.145495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
Done: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976720763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:29:02.767088Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976720759, subscribers count# 1 2025-10-24T16:29:02.767113Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976720759 2025-10-24T16:29:02.767121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976720759 2025-10-24T16:29:02.767129Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:02.767134Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720759 2025-10-24T16:29:02.767625Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:02.767646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:29:02.767652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:507:2456] TestWaitNotification: OK eventTxId 103 2025-10-24T16:29:02.770929Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:02.770976Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 62us result status StatusSuccess 2025-10-24T16:29:02.771072Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 
32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:02.771152Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table/ByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72075186233409546 2025-10-24T16:29:02.771230Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table/ByValue/indexImplTable" took 77us result status StatusSuccess 2025-10-24T16:29:02.771492Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table/ByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720761 CreateStep: 350 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 429496729 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 858993459 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 1288490188 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 1717986918 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2147483647 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 2576980377 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 3006477106 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 3435973836 } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 3865470565 } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000\231\231\231\031\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0003333\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000\314\314\314L\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000ffff\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409554 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000\231\231\231\231\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409555 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000233\263\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409556 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000\314\314\314\314\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409557 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\000eff\346\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409558 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409559 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 14 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TImportTests::ShouldRestoreIndexTableSplitPoints [GOOD] >> TImportTests::ShouldRestoreIndexTablePartitioningSettings >> TImportTests::UserSID >> TImportTests::ChangefeedWithPartitioning [GOOD] >> TImportTests::Changefeeds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ExportImportWithMetadataChecksumAbsence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:01.200072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:01.200094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.200099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:01.200106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:01.200111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:01.200114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:01.200120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.200150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:01.200239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:01.200289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:01.211066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:01.211082Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:01.213664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:01.213693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:01.213717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:01.215322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:01.215563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:01.215662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.215727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:01.216911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.216951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:01.217180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.217192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.217226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:01.217233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.217240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:01.217265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.218472Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:01.240293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:01.240362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.240410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:01.240417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:01.240493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:01.240507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:01.241062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.241100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:01.241145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.241153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:01.241159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:01.241163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:01.241532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.241542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.241551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:01.241966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.241981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.241987Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.241993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:01.242682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:01.243137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:01.243171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:01.243350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.243375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:01.243382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.243454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:01.243462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.243494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:01.243505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:01.243984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.243995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-10-24T16:29:02.865947Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.865953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 5 2025-10-24T16:29:02.865977Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 ContentLength: 11 } } 2025-10-24T16:29:02.866095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.866105Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:258: TRestore TProposedWaitParts, opId: 281474976710763:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:02.866201Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:29:02.866215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-10-24T16:29:02.866220Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-10-24T16:29:02.866225Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-10-24T16:29:02.866230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-10-24T16:29:02.866245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-10-24T16:29:02.866848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-10-24T16:29:02.877542Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 281474976710763 2025-10-24T16:29:02.888253Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 
} 2025-10-24T16:29:02.888275Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'DownloadInfo': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:02.888287Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:281474976710763] GetObject: key# /data_00.csv, range# 0-10 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:64449 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CF0A137C-CA75-4E86-BB02-02B53EECD2F1 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-10 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 11 2025-10-24T16:29:02.890526Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:281474976710763] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 6e3e0a41fdab8add833862f1bd2954c3 Body: 11b } 2025-10-24T16:29:02.890540Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:281474976710763] Content size: processed-bytes# 0, content-length# 11, body-size# 11 2025-10-24T16:29:02.890562Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:281474976710763] Upload rows: count# 1, size# 36 2025-10-24T16:29:02.891055Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:281474976710763] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409548 Status: 0 Info: { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:02.891068Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:281474976710763] Process download info at 'UploadResponse': info# { DataETag: 6e3e0a41fdab8add833862f1bd2954c3 ProcessedBytes: 11 WrittenBytes: 10 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:02.891074Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:281474976710763] Finish: success# 1, error# , writtenBytes# 10, writtenRows# 1 2025-10-24T16:29:02.903150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 770 RawX2: 12884904599 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:02.903173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710763, tablet: 72075186233409548, partId: 0 2025-10-24T16:29:02.903199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944, message: Source { RawX1: 770 RawX2: 12884904599 } Origin: 72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:02.903215Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710763:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 770 RawX2: 12884904599 } Origin: 
72075186233409548 State: 2 TxId: 281474976710763 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-10-24T16:29:02.903230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710763:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.903235Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.903241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710763:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-10-24T16:29:02.903248Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710763:0 129 -> 240 2025-10-24T16:29:02.903284Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710763:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:02.903732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.903809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.903819Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710763:0 ProgressState 2025-10-24T16:29:02.903832Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:29:02.903836Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:29:02.903841Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710763:0 progress is 1/1 2025-10-24T16:29:02.903845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:29:02.903866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: true 2025-10-24T16:29:02.903879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710763 2025-10-24T16:29:02.903885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-10-24T16:29:02.903891Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710763:0 2025-10-24T16:29:02.903896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: 
RemoveTx for txid 281474976710763:0 2025-10-24T16:29:02.903917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-10-24T16:29:02.904305Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-10-24T16:29:02.904319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710763 2025-10-24T16:29:02.904328Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:02.904333Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-10-24T16:29:02.904773Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:02.904803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-10-24T16:29:02.904811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:736:2683] TestWaitNotification: OK eventTxId 104 >> TRestoreTests::ShouldHandleOverloadedShard [GOOD] >> TRestoreTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> TImportTests::ShouldSucceedOnSingleShardTable [GOOD] >> TImportTests::ShouldSucceedOnMultiShardTable >> TRestoreTests::CancelHungOperationShouldSucceed[Raw] >> TImportTests::UserSID [GOOD] >> TImportTests::UnexpectedPermission ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:02.195572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:02.195597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:02.195601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:02.195607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:02.195613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:02.195616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:02.195623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:02.195650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:02.195787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:02.195885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:02.208686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:02.208705Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:02.211038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:02.211061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:02.211081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:02.212247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:02.212449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:02.212549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.212615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:02.213529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.213568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:02.213800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:02.213809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.213850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:02.213860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:02.213866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:02.213892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.215039Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:02.232627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:02.232701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.232753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:02.232759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:02.232817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:02.232828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:02.235601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.235653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:02.235712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.235725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:02.235731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:02.235736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:02.236188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.236200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:02.236205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:02.236539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.236547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.236553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.236559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.237247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:02.237631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:02.237679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:02.237841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.237866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:02.237874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.237942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:02.237949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.237991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:02.238005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:02.238371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:02.238377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
ecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: 2025-10-24T16:29:03.254405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true Host: localhost:17872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5236996F-2851-44FA-9683-D800CFE2A954 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:29:03.254691Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. } 2025-10-24T16:29:03.254701Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv.zst 2025-10-24T16:29:03.255078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:17872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B2111E0E-DC7A-4260-A9FC-3A9EC3106DDC amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-10-24T16:29:03.255379Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 9fefc518a77e08ff2e1005d0369e6533 ContentLength: 317 } } 2025-10-24T16:29:03.255806Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:29:03.266586Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:03.266608Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:03.266624Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-127 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:17872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0A4D2EB-1536-4E45-A4BF-709C02D96D72 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-127 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-10-24T16:29:03.267468Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle 
NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 128b } 2025-10-24T16:29:03.267486Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 317, body-size# 128 2025-10-24T16:29:03.267504Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 128-255 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:17872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2135B3F8-C0B6-46E7-9515-1ADC108035B3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=128-255 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-10-24T16:29:03.268162Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 128b } 2025-10-24T16:29:03.268175Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 317, body-size# 128 2025-10-24T16:29:03.268185Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 256-316 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:17872 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 077BCC8C-0E95-4611-8377-C7BABE51D11F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=256-316 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 317 2025-10-24T16:29:03.268781Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 9fefc518a77e08ff2e1005d0369e6533 Body: 61b } 2025-10-24T16:29:03.268795Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 317, body-size# 61 2025-10-24T16:29:03.268992Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 100, size# 2900 2025-10-24T16:29:03.269755Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 317 WrittenBytes: 1092 WrittenRows: 100 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:03.269771Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 9fefc518a77e08ff2e1005d0369e6533 ProcessedBytes: 317 WrittenBytes: 1092 WrittenRows: 100 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:03.269781Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 1092, writtenRows# 100 2025-10-24T16:29:03.281877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { 
Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-10-24T16:29:03.281897Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:03.281917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-10-24T16:29:03.281928Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 316 RawX2: 12884904189 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 1092 RowsProcessed: 100 } 2025-10-24T16:29:03.281940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.281944Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.281947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:03.281966Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:29:03.282013Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.282498Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.282567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.282574Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:29:03.282584Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:03.282587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:03.282590Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:03.282595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:03.282598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:03.282608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:344:2321] message: TxId: 102 2025-10-24T16:29:03.282613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:03.282617Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:03.282620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:03.282645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:03.283152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:03.283172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:403:2373] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ShouldHandleOverloadedShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:01.114099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:01.114123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.114128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:01.114135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:01.114141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:01.114145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:01.114153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:01.114180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:01.114290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:01.114344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:01.129581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:01.129603Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:01.132624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:01.132656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:01.132682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:01.134160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:01.134356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:01.134448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.134510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:01.136121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.136161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:01.136371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.136379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:01.136411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:01.136418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:01.136424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:01.136446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.137575Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:01.157165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:01.157223Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.157262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:01.157267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:01.157325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:01.157336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:01.157865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.157905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:01.157948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.157956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:01.157961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:01.157965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:01.158343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.158353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:01.158361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:01.158953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.158963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:01.158967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.158971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:01.159351Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:01.159716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:01.159740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:01.159906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:01.159932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:01.159940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.160001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:01.160008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:01.160041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:01.160053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:01.160512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:01.160520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
OCK::HttpServeRead: /data_00.csv / 14786 2025-10-24T16:29:03.343693Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 32b } 2025-10-24T16:29:03.343713Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:103] Content size: processed-bytes# 14679, content-length# 14786, body-size# 32 2025-10-24T16:29:03.343732Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:103] Upload rows: count# 2, size# 68 2025-10-24T16:29:03.344771Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14709 WrittenBytes: 11832 WrittenRows: 995 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:03.344788Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14709 WrittenBytes: 11832 WrittenRows: 995 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:03.344797Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:103] GetObject: key# /data_00.csv, range# 14720-14751 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:32735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 30916848-95A3-4D0A-AB4A-56072CB42F59 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=14720-14751 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-10-24T16:29:03.345405Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 32b } 2025-10-24T16:29:03.345420Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:103] Content size: processed-bytes# 14709, content-length# 14786, body-size# 32 2025-10-24T16:29:03.345443Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:103] Upload rows: count# 2, size# 68 2025-10-24T16:29:03.346753Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14739 WrittenBytes: 11856 WrittenRows: 997 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:03.346771Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14739 WrittenBytes: 11856 WrittenRows: 997 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:03.346781Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:103] GetObject: key# /data_00.csv, range# 14752-14783 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:32735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2F86B480-131D-4B18-812D-A85B1A9AB10B amz-sdk-request: attempt=1 content-type: application/xml range: bytes=14752-14783 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 
x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-10-24T16:29:03.347380Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 32b } 2025-10-24T16:29:03.347394Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:103] Content size: processed-bytes# 14739, content-length# 14786, body-size# 32 2025-10-24T16:29:03.347424Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:103] Upload rows: count# 2, size# 68 2025-10-24T16:29:03.348324Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14769 WrittenBytes: 11880 WrittenRows: 999 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:03.348340Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14769 WrittenBytes: 11880 WrittenRows: 999 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:03.348349Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:103] GetObject: key# /data_00.csv, range# 14784-14785 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:32735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3A4366E0-7A54-4848-985F-AF304C26CDE3 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=14784-14785 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14786 2025-10-24T16:29:03.349140Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:103] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: a9d13eb7c9fe058d011cb596ff181e02 Body: 2b } 2025-10-24T16:29:03.349151Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:103] Content size: processed-bytes# 14769, content-length# 14786, body-size# 2 2025-10-24T16:29:03.349166Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:103] Upload rows: count# 1, size# 39 2025-10-24T16:29:03.350330Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:103] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14786 WrittenBytes: 11893 WrittenRows: 1000 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:03.350345Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:103] Process download info at 'UploadResponse': info# { DataETag: a9d13eb7c9fe058d011cb596ff181e02 ProcessedBytes: 14786 WrittenBytes: 11893 WrittenRows: 1000 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:03.350351Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:103] Finish: success# 1, error# , writtenBytes# 11893, writtenRows# 1000 2025-10-24T16:29:03.353145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 
11893 RowsProcessed: 1000 } 2025-10-24T16:29:03.353157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:03.353193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 11893 RowsProcessed: 1000 } 2025-10-24T16:29:03.353202Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 103:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 11893 RowsProcessed: 1000 } 2025-10-24T16:29:03.353212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.353215Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.353218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:03.353221Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 129 -> 240 2025-10-24T16:29:03.353246Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 103:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.353668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.353718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.353723Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-10-24T16:29:03.353735Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:29:03.353739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:29:03.353744Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-10-24T16:29:03.353747Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:29:03.353752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-10-24T16:29:03.353766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:341:2318] message: TxId: 103 2025-10-24T16:29:03.353772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-10-24T16:29:03.353777Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-10-24T16:29:03.353781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-10-24T16:29:03.353801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:03.354178Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:29:03.354188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:400:2370] TestWaitNotification: OK eventTxId 103 >> TImportTests::UnexpectedPermission [GOOD] >> TImportTests::UnknownSchemeObjectImport >> TImportWithRebootsTests::ShouldSucceedOnBigCompressedTable >> TRestoreTests::ExportImportWithSchemeCorruption [GOOD] >> TRestoreTests::ShouldCountWrittenBytesAndRows[Raw] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query [GOOD] >> TImportTests::Changefeeds [GOOD] >> TImportTests::ChangefeedWithTablePermissions >> TReplicationWithRebootsTests::Create >> TImportTests::ShouldRestoreIndexTablePartitioningSettings [GOOD] >> TImportTests::ShouldSucceedOnMultiShardTable [GOOD] >> TImportTests::ShouldSucceedOnManyTables >> TRestoreTests::CancelHungOperationShouldSucceed[Raw] [GOOD] >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Raw] >> TImportTests::UnknownSchemeObjectImport [GOOD] >> TRestoreTests::ShouldCountWrittenBytesAndRows[Raw] [GOOD] >> TRestoreTests::ImportBigEncryptedFile >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldRestoreIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:02.334470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:02.334491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:02.334496Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:02.334501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:02.334505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:02.334508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:02.334514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:02.334534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:02.334628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:02.334678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:02.347188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:02.347215Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:02.350717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:02.350758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:02.350788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:02.352324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:02.352506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:02.352616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.352694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:02.353839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.353883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:02.354136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:02.354150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:02.354188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:02.354197Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:02.354205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:02.354232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.355601Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:02.376483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:02.376572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.376631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:02.376639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:02.376707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:02.376722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:02.377438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.377491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:02.377550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.377560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:02.377565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:02.377570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:02.377998Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.378009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:02.378017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:02.378330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.378339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:02.378344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.378351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:02.379023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:02.379427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:02.379465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:02.379656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:02.379682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:02.379689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.379756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:02.379763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:02.379801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:02.379814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:02.380284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:02.380292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:357:2336], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976720760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976720761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 300, ApplyTxId: 281474976720762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976720763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:29:04.004478Z node 3 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-10-24T16:29:04.004816Z node 3 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1508: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976720759 Done 2025-10-24T16:29:04.004838Z node 3 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1509: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 281474976720759 Done TBuildInfo{ IndexBuildId: 281474976720759, Uid: 103-0-0, DomainPathId: [OwnerId: 72075186233409546, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409546, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: ByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:357:2336], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976720760, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976720761, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 300, ApplyTxId: 281474976720762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976720763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-10-24T16:29:04.004846Z node 3 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 281474976720759, subscribers count# 1 2025-10-24T16:29:04.004868Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976720759 2025-10-24T16:29:04.004874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976720759 2025-10-24T16:29:04.004883Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: 
TImport::TTxProgress: DoExecute 2025-10-24T16:29:04.004889Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976720759 2025-10-24T16:29:04.005268Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:04.005285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:29:04.005293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:507:2456] TestWaitNotification: OK eventTxId 103 2025-10-24T16:29:04.005896Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:04.005943Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/User" took 54us result status StatusSuccess 2025-10-24T16:29:04.006055Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User" PathDescription { Self { Name: "User" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_User_kind_hdd-1" Kind: "common" } StoragePools { Name: "name_User_kind_hdd-2" Kind: "external" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:04.006125Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/User/Table/ByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72075186233409546 2025-10-24T16:29:04.006191Z node 3 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/User/Table/ByValue/indexImplTable" took 67us result status StatusSuccess 2025-10-24T16:29:04.006393Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/User/Table/ByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976720761 CreateStep: 300 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1073741824 MinPartitionsCount: 2 MaxPartitionsCount: 3 SplitByLoadSettings { Enabled: true } } } TableSchemaVersion: 2 IsBackup: false 
IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Raw] [GOOD] >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Zstd] |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::UnknownSchemeObjectImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:03.467985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:03.468011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.468017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:03.468023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:03.468029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-10-24T16:29:03.468033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:03.468042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.468067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:03.468182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:03.468248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:03.479140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:03.479165Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:03.482191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:03.482226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:03.482256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:03.483665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:03.483911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:03.483998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.484056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:03.485203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.485236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:03.485415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.485422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.485446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:03.485451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.485457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:03.485476Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.486483Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:03.505689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:03.505765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.505817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:03.505824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:03.505889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:03.505903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:03.508946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.508993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:03.509056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.509069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:03.509075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:03.509081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:03.512544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.512568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:03.512576Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:03.513154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.513170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.513176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.513182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:03.513922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:03.514445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:03.514487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:03.514616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.514638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:03.514643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.514692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:03.514698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.514726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:03.514735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:03.515165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-10-24T16:29:03.515172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 25-10-24T16:29:04.006632Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:04.006642Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:04.007075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.007088Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.007095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:04.007102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:04.007133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:04.007454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:04.007494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:04.007679Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.007700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:04.007708Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:04.007775Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:04.007782Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:04.007810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:04.007823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:04.008291Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:04.008300Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:04.008336Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:04.008344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-10-24T16:29:04.008398Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.008405Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-10-24T16:29:04.008417Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:29:04.008421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:04.008426Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-10-24T16:29:04.008429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:04.008433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-10-24T16:29:04.008439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-10-24T16:29:04.008444Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-10-24T16:29:04.008447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-10-24T16:29:04.008460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:29:04.008466Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-10-24T16:29:04.008470Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-10-24T16:29:04.008586Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:29:04.008598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-10-24T16:29:04.008603Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-10-24T16:29:04.008608Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-10-24T16:29:04.008612Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:04.008625Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-10-24T16:29:04.009271Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-10-24T16:29:04.009357Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.010406Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [3:276:2265] Bootstrap 2025-10-24T16:29:04.010637Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [3:276:2265] Become StateWork (SchemeCache [3:281:2270]) 2025-10-24T16:29:04.010887Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [3:276:2265] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-10-24T16:29:04.022666Z node 3 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestWaitNotification wait txId: 101 2025-10-24T16:29:04.022737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-10-24T16:29:04.022744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-10-24T16:29:04.022814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 101, at schemeshard: 72057594046678944 2025-10-24T16:29:04.022821Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 101, at schemeshard: 72057594046678944 REQUEST: HEAD /metadata.json HTTP/1.1 HEADERS: Host: localhost:22798 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D8D0D3C5-5D84-40B4-BE9E-18389B242EC2 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /metadata.json / 14 REQUEST: GET /metadata.json HTTP/1.1 HEADERS: Host: localhost:22798 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D0982F05-DFCD-4339-8907-5ED8448B8B73 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /metadata.json / 14 REQUEST: HEAD /scheme.pb HTTP/1.1 HEADERS: Host: localhost:22798 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: 
h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E83ED6A8-1C34-4B0F-BB77-429C9C1869E1 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 REQUEST: HEAD /create_view.sql HTTP/1.1 HEADERS: Host: localhost:22798 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F89BF55B-1199-4092-B696-9DA99BF23F0E amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 REQUEST: HEAD /create_topic.pb HTTP/1.1 HEADERS: Host: localhost:22798 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D2E72890-0BA4-4112-B755-985424B43E94 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:29:04.035575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:29:04.035594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2280] TestWaitNotification: OK eventTxId 101 >> TImportTests::ChangefeedWithTablePermissions [GOOD] >> TImportTests::ShouldSucceedOnManyTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28444, MsgBus: 16588 2025-10-24T16:27:55.840935Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825864695562496:2067];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:55.841010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:27:55.844911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020a4/r3tmp/tmpWckxIJ/pdisk_1.dat 2025-10-24T16:27:55.870904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:55.870934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:55.872534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:27:55.877020Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564825864695562465:2081] 1761323275840742 != 1761323275840745 2025-10-24T16:27:55.882328Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28444, node 1 2025-10-24T16:27:55.889879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:55.889893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2025-10-24T16:27:55.889895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:55.889935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16588 TClient is connected to server localhost:16588 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-10-24T16:27:55.947088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:55.954432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:55.966132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:55.990552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.012879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:27:56.026208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:56.160747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868990532284:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.160772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.160840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868990532294:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.160850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.213793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.224226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.235304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.245722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.260564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.274733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.289015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.303171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:27:56.318645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825868990533782:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.318674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.318691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868990533787:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.318701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825868990533789:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.318707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:27:56.319255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__o ... xecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:58.439844Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:58.449986Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:58.464294Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:58.492453Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:58.510977Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:58.754096Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826135848939553:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.754118Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.754176Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826135848939562:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.754187Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.765953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.773746Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.783214Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.790463Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.804624Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.818327Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.833057Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.847195Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:58.863301Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564826135848940425:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.863360Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.863437Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826135848940430:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.863442Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564826135848940431:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.863452Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:58.864212Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:58.874611Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564826135848940434:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-10-24T16:28:58.930884Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564826135848940486:3556] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:59.221292Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:59.362986Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:01.529740Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2981: SessionId: ydb://session/3?node_id=5&id=YzRkMDQ5MDctNmMwNDFkNDMtNWFkODhhNWYtNWE4NGIzNTk=, ActorId: [5:7564826144438876088:2619], ActorState: ExecuteState, TraceId: 01k8bgtmkg4xn7krvyaqp34b4k, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Zstd] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ChangefeedWithTablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:03.018091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:03.018112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.018116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:03.018120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:03.018124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:03.018127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:03.018133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.018153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:03.018236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:03.018279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:03.030505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:03.030527Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:03.033062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:03.033092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:03.033111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:03.034071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-10-24T16:29:03.034198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:03.034293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.034363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:03.035201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.035232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:03.035440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.035449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.035477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:03.035485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.035490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:03.035514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.036798Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:03.058457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:03.058534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.058586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:03.058594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:03.058664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:03.058681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, 
opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:03.059223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.059257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:03.059301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.059309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:03.059313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:03.059318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:03.059683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.059694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:03.059700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:03.060100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.060117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.060123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.060129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:03.060753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:03.061215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:03.061270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-10-24T16:29:03.061447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.061468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:03.061474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.061532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:03.061539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.061573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:03.061584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:03.062017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.062024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710760, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-10-24T16:29:04.417111Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-10-24T16:29:04.417126Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710760, subscribers: 1 2025-10-24T16:29:04.417131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:140:2161] 2025-10-24T16:29:04.418257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710760 2025-10-24T16:29:04.418322Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-10-24T16:29:04.418333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710760 2025-10-24T16:29:04.418341Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:04.418346Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710760 2025-10-24T16:29:04.419394Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:04.419427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:29:04.419434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:293:2280] TestWaitNotification: OK eventTxId 101 2025-10-24T16:29:04.420152Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:04.420204Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 61us result status StatusSuccess 2025-10-24T16:29:04.420340Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "eve" ACL: "\n\016\010\001\020\211\004\032\005alice \003\n\017\010\001\020\366\213\001\032\005alice \003\n\014\010\001\020\211\004\032\003bob \003" EffectiveACL: "\n\016\010\001\020\211\004\032\005alice \003\n\017\010\001\020\366\213\001\032\005alice \003\n\014\010\001\020\211\004\032\003bob \003" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 
NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "updates_feed1" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 3 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:04.420422Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/updates_feed1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:29:04.420458Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/updates_feed1" took 36us result status StatusSuccess 2025-10-24T16:29:04.420540Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/updates_feed1" PathDescription { Self { Name: "updates_feed1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeCdcStream CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\001\020\211\004\032\005alice \003(\001\n\021\010\001\020\366\213\001\032\005alice \003(\001\n\016\010\001\020\211\004\032\003bob \003(\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 CdcStreamVersion: 1 } ChildrenExist: true } Children { Name: 
"streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } CdcStreamDescription { Name: "updates_feed1" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 3 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:04.420602Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/updates_feed1/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:29:04.420624Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/updates_feed1/streamImpl" took 23us result status StatusSuccess 2025-10-24T16:29:04.420710Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/updates_feed1/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710759 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\001\020\211\004\032\005alice \003(\001\n\021\010\001\020\366\213\001\032\005alice \003(\001\n\016\010\001\020\211\004\032\003bob \003(\001" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "updates_feed1" TopicPath: "/MyRoot/Table/updates_feed1/streamImpl" YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 3 
ScaleThresholdSeconds: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "my_consumer" } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportTests::ShouldSucceedOnManyTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:03.198881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:03.198902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.198906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:03.198912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:03.198917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:03.198920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:03.198926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.198947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:03.199064Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:03.199115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:03.209808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:03.209827Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:03.212129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:03.212153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:03.212171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:03.213260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:03.213415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:03.213506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.213556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:03.214323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.214352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:03.214503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.214508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.214533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:03.214538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.214541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:03.214557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.215620Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:03.234309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-10-24T16:29:03.234371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.234414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:03.234421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:03.234475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:03.234485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:03.235005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.235032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:03.235070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.235076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:03.235080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:03.235083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:03.235373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.235380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:03.235384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:03.235649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.235655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.235658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.235662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:03.236137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:03.236484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:03.236534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:03.236656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.236672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:03.236676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.236718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:03.236722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.236744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:03.236752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:03.237116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.237124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
10760] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-10-24T16:29:04.402035Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 12884904206 } Origin: 72075186233409546 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.402055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:04.402088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 351 RawX2: 12884904206 } Origin: 72075186233409546 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.402104Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 351 RawX2: 12884904206 } Origin: 72075186233409546 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.402123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.402129Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.402133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:04.402140Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710759:0 129 -> 240 2025-10-24T16:29:04.402188Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:04.402992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.403085Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.403096Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-10-24T16:29:04.403111Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:0 
progress is 1/1 2025-10-24T16:29:04.403116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-10-24T16:29:04.403121Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710759:0 progress is 1/1 2025-10-24T16:29:04.403125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-10-24T16:29:04.403130Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-10-24T16:29:04.403145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710759 2025-10-24T16:29:04.403151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-10-24T16:29:04.403158Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710759:0 2025-10-24T16:29:04.403162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710759:0 2025-10-24T16:29:04.403189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:04.403642Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-10-24T16:29:04.403659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710759 2025-10-24T16:29:04.403670Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:04.403676Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-10-24T16:29:04.404080Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:04.414858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 380 RawX2: 12884904228 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.414880Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710760, tablet: 72075186233409547, partId: 0 2025-10-24T16:29:04.414909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944, message: Source { RawX1: 380 RawX2: 12884904228 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.414930Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 281474976710760:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 380 RawX2: 12884904228 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { 
Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.414945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710760:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.414950Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.414955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710760:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-10-24T16:29:04.414961Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710760:0 129 -> 240 2025-10-24T16:29:04.415006Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 281474976710760:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:04.415500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.415583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.415591Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-10-24T16:29:04.415605Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710760:0 progress is 1/1 2025-10-24T16:29:04.415610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-10-24T16:29:04.415615Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710760:0 progress is 1/1 2025-10-24T16:29:04.415618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-10-24T16:29:04.415623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-10-24T16:29:04.415638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:140:2161] message: TxId: 281474976710760 2025-10-24T16:29:04.415645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-10-24T16:29:04.415651Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710760:0 2025-10-24T16:29:04.415658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710760:0 2025-10-24T16:29:04.415686Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:29:04.416134Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-10-24T16:29:04.416149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976710760 2025-10-24T16:29:04.416159Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:04.416164Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:1425: TImport::TTxProgress: OnNotifyResult: txId# 281474976710760 2025-10-24T16:29:04.416566Z node 3 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete 2025-10-24T16:29:04.416585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-10-24T16:29:04.416592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:295:2280] TestWaitNotification: OK eventTxId 101 |62.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TxUsage::WriteToTopic_Demo_45_Query [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::CancelAlmostCompleteOperationShouldNotHaveEffect[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:03.709097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:03.709123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.709129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:03.709137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:03.709145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:03.709149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:03.709159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.709181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:03.709275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:03.709328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:03.722812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:03.722833Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:03.725583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:03.725618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:03.725646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:03.727195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:03.727408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:03.727526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.727600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:03.728422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.728458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:03.728758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.728773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.728808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:03.728815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.728821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:03.728839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.729872Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:03.750848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:03.750931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.750985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:03.750995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:03.751070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:03.751086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:03.751959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.752009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:03.752065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.752075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:03.752080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:03.752086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:03.752518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.752531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:03.752539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:03.752902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.752913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.752919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-10-24T16:29:03.752925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:03.753660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:03.754071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:03.754124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:03.754321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.754348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:03.754356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.754416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:03.754421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.754448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:03.754457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:03.754783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.754789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... 
hId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:29:04.760051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:29:04.760068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: HEAD /data_00.csv HTTP/1.1 HEADERS: Host: localhost:5782 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0E62DE4B-67E0-4B1C-A0DA-43C31DDBB0C3 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-10-24T16:29:04.760429Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: No response body. } 2025-10-24T16:29:04.760445Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:566: [Import] [s3:102] HeadObject: key# /data_00.csv.zst 2025-10-24T16:29:04.760855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: HEAD /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5782 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 31766BFA-19EC-4F9F-835C-2E4F81DCF836 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 2025-10-24T16:29:04.761217Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: 7443c2f403aa74cff1f199511bd22374 ContentLength: 23 } } FAKE_COORDINATOR: Erasing txId 102 2025-10-24T16:29:04.772139Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:04.793035Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:04.793056Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'DownloadInfo': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:04.793075Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:102] GetObject: key# /data_00.csv.zst, range# 0-22 REQUEST: GET /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5782 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DE1C05BE-0999-4D0C-9541-5E426EB53F0B amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-22 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv.zst / 23 
2025-10-24T16:29:04.794008Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:709: [Import] [s3:102] Handle NKikimr::NWrappers::NExternalStorage::TEvGetObjectResponse { Key: null Result: 7443c2f403aa74cff1f199511bd22374 Body: 23b } 2025-10-24T16:29:04.794022Z node 3 :DATASHARD_RESTORE TRACE: import_s3.cpp:726: [Import] [s3:102] Content size: processed-bytes# 0, content-length# 23, body-size# 23 2025-10-24T16:29:04.794053Z node 3 :DATASHARD_RESTORE INFO: import_s3.cpp:869: [Import] [s3:102] Upload rows: count# 1, size# 34 2025-10-24T16:29:04.794608Z node 3 :DATASHARD_RESTORE DEBUG: import_s3.cpp:880: [Import] [s3:102] Handle NKikimr::TEvDataShard::TEvS3UploadRowsResponse { Record: TabletID: 72075186233409546 Status: 0 Info: { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:04.794624Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:102] Process download info at 'UploadResponse': info# { DataETag: 7443c2f403aa74cff1f199511bd22374 ProcessedBytes: 23 WrittenBytes: 8 WrittenRows: 1 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:04.794631Z node 3 :DATASHARD_RESTORE NOTICE: import_s3.cpp:1028: [Import] [s3:102] Finish: success# 1, error# , writtenBytes# 8, writtenRows# 1 2025-10-24T16:29:04.817048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:71: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 102 TxId: 103 2025-10-24T16:29:04.817072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_cancel_tx.cpp:37: Execute cancel tx: opId# 103:0, target opId# 102:0 2025-10-24T16:29:04.817753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:88: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 TEvCancelTxResult for TargetTxId: 102, wait until TargetTxId: 102 TestWaitNotification wait txId: 102 2025-10-24T16:29:04.817834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:29:04.817842Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:29:04.817858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:29:04.817862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:29:04.817905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.817913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:04.817935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.817951Z node 3 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 311 RawX2: 12884904184 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-10-24T16:29:04.817966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.817971Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.817976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:04.817982Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:29:04.818014Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:04.818129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.818141Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:29:04.818151Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:04.818155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:04.818160Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:04.818163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:04.818167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:04.818171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:04.818176Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:04.818180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:04.818202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:04.818408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown 
transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:29:04.818636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:29:04.818941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.818952Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 102:0 2025-10-24T16:29:04.819086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:04.819093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:434:2402] 2025-10-24T16:29:04.819106Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:29:04.819110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:434:2402] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 >> TxUsage::WriteToTopic_Demo_46_Table >> DataShardVolatile::DistributedWrite |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TImportWithRebootsTests::CancelShouldSucceedOnSimpleTable [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnSingleChangefeed >> TGRpcStreamingTest::WriteAndFinishWorks |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::WritesDoneFromClient >> THealthCheckTest::TestStateStorageRed [GOOD] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnIndexedTable [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnDependentView >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] Test command err: 2025-10-24T16:28:36.626889Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:36.626948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:36.671526Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-10-24T16:28:36.671821Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:847:2407], Scheduled retry 
for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:36.671986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.672020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-10-24T16:28:36.672456Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:843:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-10-24T16:28:36.672515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-10-24T16:28:36.672530Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0018aa/r3tmp/tmpCo6hKr/pdisk_1.dat 2025-10-24T16:28:36.815311Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:36.870299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.870341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.870458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:36.870470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:36.910784Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:28:36.911245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:28:36.911372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5915, node 1 TClient is connected to server localhost:23765 2025-10-24T16:28:37.540341Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-10-24T16:28:37.545502Z node 1 :HIVE DEBUG: hive_impl.cpp:761: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 2 2025-10-24T16:28:37.545558Z node 1 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-10-24T16:28:37.545570Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 1 2025-10-24T16:28:37.545602Z node 1 :HIVE DEBUG: tx__status.cpp:66: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-10-24T16:28:37.546341Z node 1 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(1)::Execute 2025-10-24T16:28:37.546389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:28:37.546397Z node 1 :HIVE TRACE: hive_domains.cpp:16: Node(1) DeregisterInDomains (72057594046644480:1) : 2 -> 1 2025-10-24T16:28:37.546406Z node 1 :HIVE DEBUG: hive_impl.cpp:2875: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(1, 1) 2025-10-24T16:28:37.546415Z node 1 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [1:1229:2637] 2025-10-24T16:28:37.546422Z node 1 :HIVE DEBUG: hive_impl.cpp:130: HIVE#72057594037968897 TryToDeleteNode(1): waiting 3600.000000s 2025-10-24T16:28:37.547310Z node 1 :HIVE TRACE: hive_impl.cpp:147: 
HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([1:869:2416]) [1:1229:2637] 2025-10-24T16:28:37.547538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:28:37.547551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:28:37.547555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:28:37.547779Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:28:37.547974Z node 1 :HIVE DEBUG: hive_impl.cpp:761: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 2: Status: 2 2025-10-24T16:28:37.547994Z node 1 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(2)::Execute 2025-10-24T16:28:37.548001Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 2 2025-10-24T16:28:37.548133Z node 1 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(2)::Execute 2025-10-24T16:28:37.548163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-10-24T16:28:37.548169Z node 1 :HIVE TRACE: hive_domains.cpp:16: Node(2) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-10-24T16:28:37.548176Z node 1 :HIVE DEBUG: hive_impl.cpp:2875: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(2, 2) 2025-10-24T16:28:37.548183Z node 1 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [1:1234:2642] 2025-10-24T16:28:37.548189Z node 1 :HIVE DEBUG: hive_impl.cpp:130: HIVE#72057594037968897 TryToDeleteNode(2): waiting 3600.000000s 2025-10-24T16:28:37.548244Z node 1 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([1:1354:2717]) [1:1593:2722] 2025-10-24T16:28:37.548277Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2025-10-24T16:28:37.550296Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2025-10-24T16:28:37.550320Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-10-24T16:28:37.550370Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2025-10-24T16:28:37.550377Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-10-24T16:28:37.550403Z node 1 :HIVE DEBUG: hive_impl.cpp:2888: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2025-10-24T16:28:37.550410Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-10-24T16:28:37.550429Z node 1 :HIVE DEBUG: hive_impl.cpp:1105: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 
72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-10-24T16:28:37.550539Z node 1 :HIVE TRACE: hive_impl.cpp:147: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([2:866:2358]) [1:1234:2642] 2025-10-24T16:28:37.550684Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-10-24T16:28:37.551723Z node 1 :HIVE TRACE: hive_impl.cpp:139: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([3:1574:2358]) [1:1641:2728] 2025-10-24T16:28:37.555037Z node 1 :HIVE DEBUG: hive_impl.cpp:166: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [3:1573:2358] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-10-24T16:28:37.555069Z node 1 :HIVE DEBUG: tx__register_node.cpp:21: HIVE#72057594037968897 THive::TTxRegisterNode(3)::Execute 2025-10-24T16:28:37.555098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:28:37.555105Z node 1 :HIVE DEBUG: hive_impl.cpp:386: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-10-24T16:28:37.555109Z node 1 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72057594037968897 ProcessBootQueue (0) 2025-10-24T16:28:37.555127Z node 1 :HIVE TRACE: hive_impl.cpp:369: HIVE#72057594037968897 ProcessBootQueue - sending 2025-10-24T16:28:37.555133Z node 1 :HIVE DEBUG: hive_impl.cpp:386: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-10-24T16:28:37.555137Z node 1 :HIVE DEBUG: hive_impl.cpp:367: HIVE#72057594037968897 ProcessBootQueue (0) 2025-10-24T16:28:37.555150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:28:37.555262Z node 1 :HIVE TRACE: hive_impl.cpp:353: HIVE#72057594037968897 ProcessBootQueue - executing 2025-10-24T16:28:37.555276Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-10-24T16:28:37.555282Z node 1 :HIVE DEBUG: hive_impl.cpp:247: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-10-24T16:28:37.555287Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-10-24T16:28:37.555293Z node 1 :HIVE DEBUG: hive_impl.cpp:327: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-10-24T16:28:37.555392Z node 1 :HIVE DEBUG: hive_impl.cpp:838: HIVE#72057594037968897 TEvInterconn ... 
03.254817Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.254861Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.254876Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.254917Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.254927Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.254978Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.254989Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.255036Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.255046Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.255087Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.255098Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.255143Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.255153Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.255205Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:03.255215Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:03.311282Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 34 Cookie 34 2025-10-24T16:29:03.311505Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 35 Cookie 35 2025-10-24T16:29:03.311540Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2025-10-24T16:29:03.311570Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 37 Cookie 37 2025-10-24T16:29:03.311602Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 38 Cookie 38 2025-10-24T16:29:03.311633Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 39 Cookie 39 2025-10-24T16:29:03.311664Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 40 Cookie 40 2025-10-24T16:29:03.311695Z node 33 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 41 Cookie 41 2025-10-24T16:29:03.311792Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312042Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312083Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312144Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312178Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312212Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312246Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312280Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:03.312317Z node 33 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2304, node 33 TClient is connected to server localhost:25487 2025-10-24T16:29:03.384214Z node 33 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:29:03.384232Z node 33 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:29:03.384235Z node 33 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:29:03.384354Z node 33 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-10-24T16:29:03.397328Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.443129Z node 39 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.454259Z node 33 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.465653Z node 40 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.488753Z node 36 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.534874Z node 35 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.546700Z node 41 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.558561Z node 38 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:03.613761Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:29:04.299817Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.299889Z node 33 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.299933Z node 35 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.300024Z node 36 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.300048Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.300069Z node 38 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.300093Z node 39 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.300118Z node 40 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:29:04.300141Z node 41 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-39" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 39 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-40" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 40 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-41" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 41 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-39" reason: "YELLOW-7932-1231c6b1-40" reason: "YELLOW-7932-1231c6b1-41" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-7831" status: RED message: "There is not enough functional rings" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-33" 
type: "STATE_STORAGE_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-b30b-1-33" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 33 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-568c" status: RED message: "There is not enough functional rings" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-33" type: "SCHEME_BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-106b-1-33" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 33 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-18c3" status: RED message: "There is not enough functional rings" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-33" type: "BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-0632-1-33" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 33 } } } } type: "BOARD_NODE" level: 3 listed: 6 count: 6 } location { id: 33 host: "::1" port: 12001 } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-10-24T16:29:05.860757Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826166040043768:2075];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:05.860772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002da0/r3tmp/tmpCpfPSV/pdisk_1.dat 2025-10-24T16:29:05.895913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:05.920294Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x1139ffbbe000] stream accepted Name# Session ok# true peer# ipv6:[::1]:36430 2025-10-24T16:29:05.921961Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:05.921979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826166040043731:2081] 1761323345860405 != 1761323345860408 2025-10-24T16:29:05.925734Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x1139ffbbe000] facade attach Name# Session actor# [1:7564826166040044272:2261] peer# ipv6:[::1]:36430 2025-10-24T16:29:05.925752Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x1139ffbbe000] facade write Name# Session data# peer# ipv6:[::1]:36430 2025-10-24T16:29:05.925921Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:399: [0x1139ffbbe000] facade write Name# Session data# peer# ipv6:[::1]:36430 grpc status# (0) message# 2025-10-24T16:29:05.926272Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x1139ffbbe000] write finished Name# Session ok# true peer# ipv6:[::1]:36430 2025-10-24T16:29:05.926312Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 
1 2025-10-24T16:29:05.926364Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x1139ffbbe000] stream done notification Name# Session ok# true peer# ipv6:[::1]:36430 2025-10-24T16:29:05.926372Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x1139ffbbe000] write finished Name# Session ok# true peer# ipv6:[::1]:36430 2025-10-24T16:29:05.926376Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x1139ffbbe000] stream finished Name# Session ok# true peer# ipv6:[::1]:36430 grpc status# (0) message# 2025-10-24T16:29:05.926392Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x1139ffbbe000] deregistering request Name# Session peer# ipv6:[::1]:36430 (finish done) 2025-10-24T16:29:05.926419Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2025-10-24T16:29:05.965004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:05.965029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:05.966307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TGRpcStreamingTest::SimpleEcho [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> TGRpcStreamingTest::ReadFinish |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-10-24T16:29:06.102326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826171387097565:2065];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:06.102348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:29:06.106480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002dbf/r3tmp/tmpwpVbQo/pdisk_1.dat 2025-10-24T16:29:06.143494Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:06.143681Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826171387097541:2081] 1761323346102175 != 1761323346102178 2025-10-24T16:29:06.185376Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x31727fbbc000] stream accepted Name# Session ok# true peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189094Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x31727fbbc000] facade attach Name# Session actor# [1:7564826171387098073:2258] peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189113Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x31727fbbc000] facade read Name# Session peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189153Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x31727fbbc000] facade write Name# Session data# peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189289Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x31727fbbc000] facade finish Name# Session peer# ipv6:[::1]:40184 grpc status# (0) message# 2025-10-24T16:29:06.189517Z node 1 
:GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x31727fbbc000] write finished Name# Session ok# true peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189582Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x31727fbbc000] read finished Name# Session ok# false data# peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189594Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x31727fbbc000] stream done notification Name# Session ok# true peer# ipv6:[::1]:40184 2025-10-24T16:29:06.189598Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x31727fbbc000] stream finished Name# Session ok# true peer# ipv6:[::1]:40184 grpc status# (0) message# 2025-10-24T16:29:06.189613Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x31727fbbc000] deregistering request Name# Session peer# ipv6:[::1]:40184 (finish done) 2025-10-24T16:29:06.189790Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2025-10-24T16:29:06.189798Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 2025-10-24T16:29:06.189800Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone 2025-10-24T16:29:06.207614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:06.207648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:06.208721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:06.214863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-10-24T16:29:06.198853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826172387946688:2139];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:06.199010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002dcd/r3tmp/tmp2vBsgp/pdisk_1.dat 2025-10-24T16:29:06.249458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:06.249490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:06.250356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:06.258478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:06.258939Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:06.259151Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826172387946579:2081] 1761323346196484 != 
1761323346196487 2025-10-24T16:29:06.267288Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x5180bfbb7000] stream accepted Name# Session ok# true peer# ipv6:[::1]:51222 2025-10-24T16:29:06.267423Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x5180bfbb7000] facade attach Name# Session actor# [1:7564826172387947118:2263] peer# ipv6:[::1]:51222 2025-10-24T16:29:06.267441Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x5180bfbb7000] facade read Name# Session peer# ipv6:[::1]:51222 2025-10-24T16:29:06.267513Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x5180bfbb7000] read finished Name# Session ok# false data# peer# ipv6:[::1]:51222 2025-10-24T16:29:06.267542Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2025-10-24T16:29:06.267560Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x5180bfbb7000] facade finish Name# Session peer# ipv6:[::1]:51222 grpc status# (9) message# Everything is A-OK 2025-10-24T16:29:06.267689Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x5180bfbb7000] stream done notification Name# Session ok# true peer# ipv6:[::1]:51222 2025-10-24T16:29:06.267712Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone 2025-10-24T16:29:06.267730Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x5180bfbb7000] stream finished Name# Session ok# true peer# ipv6:[::1]:51222 grpc status# (9) message# Everything is A-OK 2025-10-24T16:29:06.267757Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x5180bfbb7000] deregistering request Name# Session peer# ipv6:[::1]:51222 (finish done) >> TGRpcStreamingTest::ClientDisconnects ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-10-24T16:29:06.411145Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826172124768074:2070];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:06.411333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d7e/r3tmp/tmpf4d15t/pdisk_1.dat 2025-10-24T16:29:06.450787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:06.466860Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:06.467142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826172124768043:2081] 1761323346410587 != 1761323346410590 2025-10-24T16:29:06.475208Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x31d6ffbbd000] stream accepted Name# Session ok# true peer# ipv6:[::1]:46096 2025-10-24T16:29:06.475328Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x31d6ffbbd000] facade attach Name# Session actor# [1:7564826172124768584:2261] peer# ipv6:[::1]:46096 2025-10-24T16:29:06.475343Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x31d6ffbbd000] facade read Name# Session peer# ipv6:[::1]:46096 2025-10-24T16:29:06.475463Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x31d6ffbbd000] read finished Name# Session ok# true data# peer# 
ipv6:[::1]:46096 2025-10-24T16:29:06.475484Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2025-10-24T16:29:06.475489Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x31d6ffbbd000] facade write Name# Session data# peer# ipv6:[::1]:46096 2025-10-24T16:29:06.475581Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x31d6ffbbd000] facade finish Name# Session peer# ipv6:[::1]:46096 grpc status# (0) message# 2025-10-24T16:29:06.475623Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x31d6ffbbd000] write finished Name# Session ok# true peer# ipv6:[::1]:46096 2025-10-24T16:29:06.475696Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x31d6ffbbd000] stream done notification Name# Session ok# true peer# ipv6:[::1]:46096 2025-10-24T16:29:06.475727Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x31d6ffbbd000] stream finished Name# Session ok# true peer# ipv6:[::1]:46096 grpc status# (0) message# 2025-10-24T16:29:06.475751Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x31d6ffbbd000] deregistering request Name# Session peer# ipv6:[::1]:46096 (finish done) 2025-10-24T16:29:06.514624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:06.514668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:06.515754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected >> TRestoreTests::ImportBigEncryptedFile [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnEmptyToken[Zstd] >> TGRpcStreamingTest::ReadFinish [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |62.6%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |62.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> TGRpcStreamingTest::ClientDisconnects [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-10-24T16:29:06.911321Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826171241766941:2066];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:06.911341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d46/r3tmp/tmpR02XGW/pdisk_1.dat 2025-10-24T16:29:06.954450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:06.963583Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:06.964370Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826171241766916:2081] 1761323346911171 != 1761323346911174 2025-10-24T16:29:06.970374Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x5314bfbbe000] stream accepted Name# Session ok# true peer# ipv6:[::1]:42268 2025-10-24T16:29:06.970459Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x5314bfbbe000] facade attach Name# Session actor# [1:7564826171241767456:2259] peer# ipv6:[::1]:42268 2025-10-24T16:29:06.970464Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x5314bfbbe000] facade read Name# Session peer# ipv6:[::1]:42268 2025-10-24T16:29:06.970483Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x5314bfbbe000] facade finish Name# Session peer# ipv6:[::1]:42268 grpc status# (0) message# 2025-10-24T16:29:06.970586Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x5314bfbbe000] read finished Name# Session ok# false data# peer# ipv6:[::1]:42268 2025-10-24T16:29:06.970592Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 2025-10-24T16:29:06.970592Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x5314bfbbe000] stream done notification Name# Session ok# true peer# ipv6:[::1]:42268 2025-10-24T16:29:06.970598Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x5314bfbbe000] stream finished Name# Session ok# true peer# ipv6:[::1]:42268 grpc status# (0) message# 2025-10-24T16:29:06.970607Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x5314bfbbe000] deregistering request Name# Session peer# ipv6:[::1]:42268 (finish done) 2025-10-24T16:29:07.020949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:07.020971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:07.022017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreTests::ImportBigEncryptedFile [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:03.347703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:03.347724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.347729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:03.347736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:03.347742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:03.347746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:03.347756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:03.347780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:03.347902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:03.347962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:03.363576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:03.363593Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:03.365902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:03.365927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:03.365948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:03.367366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:03.367525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:03.367624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.367692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:03.368604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.368640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:03.368858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.368868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:03.368903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:03.368911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:03.368918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:03.369027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.370233Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:03.391400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:03.391493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.391555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:03.391563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:03.391632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:03.391649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:03.393230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.393278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:03.393342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.393354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:03.393359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:03.393364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:03.393766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.393777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:03.393783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:03.394085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.394094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:03.394099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.394105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:03.394748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:03.395144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:03.395173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:03.395347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:03.395373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 
1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:03.395380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.395455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:03.395463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:03.395498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:03.395510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:03.395926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:03.395936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... :29:07.118548Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:07.118555Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 129 -> 240 2025-10-24T16:29:07.118604Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:07.118667Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:29:07.119136Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:07.119148Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:29:07.119154Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2025-10-24T16:29:07.119172Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [8:314:2299] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-10-24T16:29:07.119246Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [8:131:2155], Recipient [8:131:2155]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-10-24T16:29:07.119252Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-10-24T16:29:07.119259Z node 8 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:07.119267Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-10-24T16:29:07.119280Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:29:07.119285Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:07.119290Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:07.119296Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:07.119299Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:07.119304Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:07.119314Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:340:2317] message: TxId: 102 2025-10-24T16:29:07.119321Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:07.119328Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:07.119332Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:07.119357Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:07.119772Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:29:07.119788Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [8:340:2317] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-10-24T16:29:07.119819Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:07.119825Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:399:2369] 2025-10-24T16:29:07.119886Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [8:401:2371], Recipient [8:131:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:07.119895Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:07.119900Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-10-24T16:29:07.119985Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [8:448:2416], Recipient [8:131:2155]: NKikimrSchemeOp.TDescribePath Path: 
"/MyRoot/TestTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-10-24T16:29:07.119990Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:29:07.120004Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TestTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-10-24T16:29:07.120085Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TestTable" took 75us result status StatusSuccess 2025-10-24T16:29:07.120340Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TestTable" PathDescription { Self { Name: "TestTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TestTable" Columns { Name: "Key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 
BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:07.147509Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [8:441:2409], Recipient [8:131:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:07.147543Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:07.147548Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnFileWithoutNewLines[Zstd] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [GOOD] |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-10-24T16:29:07.279132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826177063720593:2139];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:07.279529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002d75/r3tmp/tmpviU7rU/pdisk_1.dat 2025-10-24T16:29:07.323915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:07.337080Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:07.340114Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826177063720492:2081] 1761323347276068 != 1761323347276071 2025-10-24T16:29:07.348563Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x3020ffbba000] stream accepted Name# Session ok# true peer# ipv6:[::1]:42904 2025-10-24T16:29:07.348672Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x3020ffbba000] facade attach Name# Session actor# [1:7564826177063721028:2258] peer# ipv6:[::1]:42904 2025-10-24T16:29:07.352306Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x3020ffbba000] stream done notification Name# Session ok# true peer# ipv6:[::1]:42904 2025-10-24T16:29:07.352373Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2025-10-24T16:29:07.355911Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x3020ffbba000] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-10-24T16:29:07.355935Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x3020ffbba000] deregistering request Name# Session peer# unknown (finish done) 2025-10-24T16:29:07.390367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:07.390403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:07.391113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable 
[GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |62.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/stress_tool/ydb_stress_tool |62.7%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |62.7%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |62.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] >> TSequenceReboots::CreateDropRecreate >> TSequenceReboots::CreateSequence >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TxUsage::WriteToTopic_Demo_27_Table [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |62.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TxUsage::WriteToTopic_Demo_27_Query >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnMultiShardTable[Zstd] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TRestoreWithRebootsTests::ShouldSucceedOnSingleShardTable[Zstd] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-10-24T16:29:10.731414Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:10.732285Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:10.732359Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-10-24T16:29:10.732372Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.732383Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-10-24T16:29:10.732608Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2025-10-24T16:29:10.732626Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:10.736557Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:10.736614Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.736889Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:10.736952Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:10.737057Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:10.737111Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2025-10-24T16:29:10.737519Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:10.737524Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-10-24T16:29:10.737530Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2025-10-24T16:29:10.737536Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:10.737543Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:10.737548Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-10-24T16:29:10.737551Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:10.737560Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:10.737563Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928037][Partition][0][StateIdle] Try persist 2025-10-24T16:29:10.737589Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:10.737622Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:29:10.737725Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2025-10-24T16:29:10.750582Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:10.752073Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:10.752166Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-10-24T16:29:10.752179Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.752190Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:10.752363Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:402:2357], now have 1 active actors on pipe 2025-10-24T16:29:10.752389Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:10.753679Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:10.753713Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.753932Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928139] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:10.753970Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:10.754042Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:10.754100Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:410:2328] 
2025-10-24T16:29:10.754663Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:10.754677Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-10-24T16:29:10.754686Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:410:2328] 2025-10-24T16:29:10.754694Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:10.754706Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:10.754714Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:10.754718Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:10.754725Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:10.754728Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:10.754751Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:10.754811Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:10.754931Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:413:2362], now have 1 active actors on pipe 2025-10-24T16:29:10.755214Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:419:2365], now have 1 active actors on pipe 2025-10-24T16:29:10.755307Z node 2 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:10.755338Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:421:2366], now have 1 active actors on pipe 2025-10-24T16:29:10.755424Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928037] server disconnected, pipe [2:419:2365] destroyed 2025-10-24T16:29:10.755441Z node 2 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:10.755513Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928139] server disconnected, pipe [2:421:2366] destroyed 2025-10-24T16:29:10.973879Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:10.975175Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 
72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:10.975317Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-10-24T16:29:10.975336Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.975349Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-10-24T16:29:10.975570Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [3:262:2255], now have 1 active actors on pipe 2025-10-24T16:29:10.975600Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:10.976279Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 900000 ... ding events. Count 0 2025-10-24T16:29:11.016241Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:11.016359Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:534:2445], now have 1 active actors on pipe 2025-10-24T16:29:11.016607Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [3:540:2448], now have 1 active actors on pipe 2025-10-24T16:29:11.016654Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [3:541:2449], now have 1 active actors on pipe 2025-10-24T16:29:11.016695Z node 3 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:11.016734Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:542:2449], now have 1 active actors on pipe 2025-10-24T16:29:11.016749Z node 3 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:11.016776Z node 3 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:11.027175Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:550:2456], now have 1 active actors on pipe 2025-10-24T16:29:11.035700Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:11.036414Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:11.036621Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:11.036631Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:11.036656Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:11.036709Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:11.036747Z node 3 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:606:2459] 2025-10-24T16:29:11.037412Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-10-24T16:29:11.037669Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-10-24T16:29:11.037706Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-10-24T16:29:11.037752Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-10-24T16:29:11.037781Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-10-24T16:29:11.037786Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-10-24T16:29:11.037792Z node 3 :PERSQUEUE INFO: partition_init.cpp:968: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-10-24T16:29:11.037796Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:11.037802Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-10-24T16:29:11.037809Z node 3 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:606:2459] 2025-10-24T16:29:11.037819Z node 3 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:11.037830Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:11.037838Z node 3 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:11.037844Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:11.037849Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:11.037854Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:11.037874Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-10-24T16:29:11.037909Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:11.038113Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928037] server disconnected, pipe [3:540:2448] destroyed 2025-10-24T16:29:11.038165Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928138] server disconnected, pipe [3:541:2449] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 93 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 93 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 
Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> DataShardVolatile::DistributedWriteThenSplit >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] |62.8%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-10-24T16:29:10.722307Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:10.723571Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:10.723660Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-10-24T16:29:10.723673Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.723684Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-10-24T16:29:10.723892Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2025-10-24T16:29:10.723912Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:10.727677Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:10.727731Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.728072Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:10.728138Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:10.728295Z node 2 :PERSQUEUE DEBUG: 
partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:10.728388Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:267:2224] 2025-10-24T16:29:10.728987Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:10.729000Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-10-24T16:29:10.729008Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2025-10-24T16:29:10.729017Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:10.729028Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:10.729036Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2025-10-24T16:29:10.729041Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:10.729055Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:10.729059Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928037][Partition][0][StateIdle] Try persist 2025-10-24T16:29:10.729097Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:10.729155Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:29:10.729280Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2025-10-24T16:29:10.745632Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:10.746569Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:10.746643Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-10-24T16:29:10.746656Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.746667Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-10-24T16:29:10.746836Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2025-10-24T16:29:10.746854Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:10.747314Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-10-24T16:29:10.747335Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.747540Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928138] Config applied version 2 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-10-24T16:29:10.747567Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:10.747628Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:10.747665Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:407:2326] 2025-10-24T16:29:10.748318Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:10.748328Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-10-24T16:29:10.748335Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:407:2326] 2025-10-24T16:29:10.748344Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:10.748354Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:10.748362Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2025-10-24T16:29:10.748368Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:10.748374Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:10.748379Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928138][Partition][1][StateIdle] Try persist 2025-10-24T16:29:10.748398Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:10.748447Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2025-10-24T16:29:10.748571Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2025-10-24T16:29:10.753523Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:10.754496Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:10.754565Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-10-24T16:29:10.754576Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.754585Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:10.754726Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:459:2396], now have 1 active actors on pipe 2025-10-24T16:29:10.754761Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:10.755310Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:10.755340Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:10.755584Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928139] Config applied version 3 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:10.755617Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:10.755680Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:10.755706Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:467:2367] 
2025-10-24T16:29:10.756225Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:10.756233Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-10-24T16:29:10.756239Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:467:2367] 2025-10-24T16:29:10.756246Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:10.756257Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:10.756263Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:10.756267Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:10.756272Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:10.756276Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:10.756291Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:10.756336Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:10.756436Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:470:2401], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-10-24T16:29:10.758192Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:477:2404], now have 1 active actors on pipe 2025-10-24T16:29:10.758327Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [2:480:2405], now have 1 active actors on pipe 2025-10-24T16:29:10.758408Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:481:2405], now have 1 active actors on pipe 2025-10-24T16:29:10.758488Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928037] server disconnected, pipe [2:477:2404] destroyed 2025-10-24T16:29:10.758616Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928138] server disconnected, pipe [2:480:2405] destroyed 2025-10-24T16:29:10.758630Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928139] server disconnected, pipe [2:481:2405] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" 
ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |62.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: 
BAD_REQUEST } |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> TSequenceReboots::CreateMultipleSequencesNoInitialSequenceShard |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |62.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |62.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |62.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |62.9%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName >> TImportWithRebootsTests::CancelShouldSucceedOnSingleChangefeed [GOOD] >> TImportWithRebootsTests::CancelShouldSucceedOnSingleTableWithUniqueIndex >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table [GOOD] >> TSchemeShardSplitBySizeTest::Split10Shards ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-10-24T16:29:12.131641Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 
2025-10-24T16:29:12.132728Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.132828Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-10-24T16:29:12.132842Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.132852Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-10-24T16:29:12.133088Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:262:2255], now have 1 active actors on pipe 2025-10-24T16:29:12.133115Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.136950Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.136998Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.137279Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928037] Config applied version 1 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.137336Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.137450Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.137510Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:270:2226] 2025-10-24T16:29:12.138131Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.138138Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-10-24T16:29:12.138146Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2226] 2025-10-24T16:29:12.138155Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.138165Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.138173Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-10-24T16:29:12.138178Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.138192Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.138196Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928037][Partition][0][StateIdle] Try persist 2025-10-24T16:29:12.138235Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.138284Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.138429Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:273:2260], now have 1 active actors on pipe 2025-10-24T16:29:12.150825Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.152002Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.152080Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-10-24T16:29:12.152092Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.152103Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928137] doesn't have tx writes info 2025-10-24T16:29:12.152263Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928137] server connected, pipe [2:403:2358], now have 1 active actors on pipe 2025-10-24T16:29:12.152285Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.153028Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.153116Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.153346Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928137] Config applied version 2 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.153375Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.153441Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.153478Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:411:2329] 
2025-10-24T16:29:12.154050Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.154057Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-10-24T16:29:12.154064Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:411:2329] 2025-10-24T16:29:12.154072Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.154082Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.154088Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-10-24T16:29:12.154093Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928137][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.154099Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928137][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.154104Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928137][Partition][0][StateIdle] Try persist 2025-10-24T16:29:12.154122Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.154165Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.154279Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928137] server connected, pipe [2:414:2363], now have 1 active actors on pipe 2025-10-24T16:29:12.181005Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.182062Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.182138Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-10-24T16:29:12.182151Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.182162Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-10-24T16:29:12.182335Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [2:463:2399], now have 1 active actors on pipe 2025-10-24T16:29:12.182362Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.183007Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-10-24T16:29:12.183038Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.183257Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928138] Config applied version 3 actor [2:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-10-24T16:29:12.183288Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.183355Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.183392Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:471:2370] 2025-10-24T16:29:12.183970Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:1:Initializer] Start initializi ... pp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:530:2410] 2025-10-24T16:29:12.415966Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.415979Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-10-24T16:29:12.415987Z node 3 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:530:2410] 2025-10-24T16:29:12.415997Z node 3 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.416011Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.416018Z node 3 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:12.416023Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.416029Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.416033Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:12.416051Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.416105Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.416244Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:533:2444], now have 1 active actors on pipe 2025-10-24T16:29:12.416483Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [3:539:2447], now have 1 active actors on pipe 2025-10-24T16:29:12.416532Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [3:540:2448], now have 1 active actors on pipe 2025-10-24T16:29:12.416578Z node 3 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:12.416613Z node 3 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:12.416624Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:541:2448], now have 1 active actors on pipe 2025-10-24T16:29:12.416652Z node 3 :PERSQUEUE DEBUG: partition.cpp:1016: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-10-24T16:29:12.427074Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:549:2455], now have 1 active actors on pipe 2025-10-24T16:29:12.434396Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 
72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.434962Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.435201Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.435215Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:12.435245Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.435313Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.435358Z node 3 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:605:2458] 2025-10-24T16:29:12.435980Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-10-24T16:29:12.436276Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-10-24T16:29:12.436326Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-10-24T16:29:12.436371Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-10-24T16:29:12.436400Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-10-24T16:29:12.436405Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-10-24T16:29:12.436411Z node 3 :PERSQUEUE INFO: partition_init.cpp:968: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-10-24T16:29:12.436416Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.436421Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-10-24T16:29:12.436430Z node 3 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:605:2458] 2025-10-24T16:29:12.436439Z node 3 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.436450Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.436457Z node 3 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-10-24T16:29:12.436463Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.436468Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.436472Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:12.436490Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.436532Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.436719Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928138] server disconnected, pipe [3:540:2448] destroyed 2025-10-24T16:29:12.436734Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928037] server disconnected, pipe [3:539:2447] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneShard [GOOD] >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneToOne ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-10-24T16:29:12.220133Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.221364Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.221610Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-10-24T16:29:12.221628Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.221639Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-10-24T16:29:12.221846Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:257:2251], now have 1 active actors on pipe 2025-10-24T16:29:12.221858Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.230864Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.231051Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.232080Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.232277Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.232839Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.232955Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:265:2222] 2025-10-24T16:29:12.233608Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.233619Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-10-24T16:29:12.233628Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:265:2222] 2025-10-24T16:29:12.233637Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.233647Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.233655Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-10-24T16:29:12.233661Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928037][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.233673Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928037][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.233678Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928037][Partition][0][StateIdle] Try persist 2025-10-24T16:29:12.233716Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.233783Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.233943Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:268:2256], now have 1 active actors on pipe 2025-10-24T16:29:12.244615Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.245586Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.245659Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-10-24T16:29:12.245672Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.245682Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:12.245841Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:396:2352], now have 1 active actors on pipe 2025-10-24T16:29:12.245862Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.246340Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:12.246369Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.246607Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:12.246637Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.246698Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.246734Z node 2 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:404:2323] 
2025-10-24T16:29:12.247314Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.247322Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-10-24T16:29:12.247329Z node 2 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:404:2323] 2025-10-24T16:29:12.247338Z node 2 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.247348Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.247356Z node 2 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:12.247361Z node 2 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.247369Z node 2 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.247373Z node 2 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:12.247391Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.247453Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.247570Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:407:2357], now have 1 active actors on pipe 2025-10-24T16:29:12.247869Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [2:413:2360], now have 1 active actors on pipe 2025-10-24T16:29:12.247930Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [2:415:2361], now have 1 active actors on pipe 2025-10-24T16:29:12.247967Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928037] server disconnected, pipe [2:413:2360] destroyed 2025-10-24T16:29:12.248049Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928139] server disconnected, pipe [2:415:2361] destroyed 2025-10-24T16:29:12.451084Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.451903Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.451977Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-10-24T16:29:12.451991Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.452002Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-10-24T16:29:12.452140Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [3:262:2255], now have 1 active actors on pipe 2025-10-24T16:29:12.452161Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.452641Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-10-24T16:29:12.452669Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.452783Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928037] Config applied version 3 actor [3:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { Partition ... EUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-10-24T16:29:12.472554Z node 3 :PERSQUEUE INFO: partition.cpp:709: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:470:2369] 2025-10-24T16:29:12.472560Z node 3 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.472568Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.472573Z node 3 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928138][Partition][1][StateIdle] Process pending events. Count 0 2025-10-24T16:29:12.472576Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928138][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.472579Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928138][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.472582Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928138][Partition][1][StateIdle] Try persist 2025-10-24T16:29:12.472596Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. 
Count 0 2025-10-24T16:29:12.472634Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.472713Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [3:473:2403], now have 1 active actors on pipe 2025-10-24T16:29:12.475476Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.476145Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.476191Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-10-24T16:29:12.476199Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.476206Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:12.476298Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:522:2439], now have 1 active actors on pipe 2025-10-24T16:29:12.476312Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1276: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-10-24T16:29:12.476675Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:12.476694Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.476840Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1303: [PQ: 72057594037928139] Config applied version 6 actor [3:105:2138] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-10-24T16:29:12.476859Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.476901Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.476923Z node 3 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:530:2410] 2025-10-24T16:29:12.477244Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.477248Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-10-24T16:29:12.477253Z node 3 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:530:2410] 2025-10-24T16:29:12.477258Z node 3 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.477264Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.477268Z node 3 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:12.477271Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.477275Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.477280Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:12.477295Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-10-24T16:29:12.477328Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.477412Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:533:2444], now have 1 active actors on pipe 2025-10-24T16:29:12.477595Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928037] server connected, pipe [3:539:2447], now have 1 active actors on pipe 2025-10-24T16:29:12.477613Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928138] server connected, pipe [3:540:2448], now have 1 active actors on pipe 2025-10-24T16:29:12.477618Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:541:2448], now have 1 active actors on pipe 2025-10-24T16:29:12.487953Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2722: [PQ: 72057594037928139] server connected, pipe [3:546:2452], now have 1 active actors on pipe 2025-10-24T16:29:12.492968Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2961: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-10-24T16:29:12.493520Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2993: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-10-24T16:29:12.493676Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:913: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-10-24T16:29:12.493686Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-10-24T16:29:12.493708Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-10-24T16:29:12.493756Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-10-24T16:29:12.493785Z node 3 :PERSQUEUE INFO: partition_init.cpp:1095: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:602:2455] 2025-10-24T16:29:12.494140Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-10-24T16:29:12.494293Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-10-24T16:29:12.494319Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-10-24T16:29:12.494347Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-10-24T16:29:12.494366Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-10-24T16:29:12.494369Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-10-24T16:29:12.494373Z node 3 :PERSQUEUE INFO: partition_init.cpp:968: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-10-24T16:29:12.494376Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:81: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-10-24T16:29:12.494380Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:61: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-10-24T16:29:12.494385Z node 3 :PERSQUEUE INFO: partition.cpp:709: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:602:2455] 2025-10-24T16:29:12.494392Z node 3 :PERSQUEUE DEBUG: partition.cpp:723: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-10-24T16:29:12.494399Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:83: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-10-24T16:29:12.494404Z node 3 :PERSQUEUE DEBUG: partition.cpp:4325: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-10-24T16:29:12.494407Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72057594037928139][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:12.494410Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72057594037928139][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:12.494413Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72057594037928139][Partition][2][StateIdle] Try persist 2025-10-24T16:29:12.494426Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:60: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-10-24T16:29:12.494454Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-10-24T16:29:12.494583Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928037] server disconnected, pipe [3:539:2447] destroyed 2025-10-24T16:29:12.494616Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72057594037928138] server disconnected, pipe [3:540:2448] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TSchemeShardSplitByLoad::TableSplitsUpToMaxPartitionsCount >> TxUsage::WriteToTopic_Demo_46_Table [GOOD] >> TxUsage::WriteToTopic_Demo_46_Query >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneToOne [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-10-24T16:27:42.972047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:27:42.984259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:27:42.984277Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:27:42.984580Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:27:42.984616Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:138:2159] 2025-10-24T16:27:42.984654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-10-24T16:27:42.985421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:114:2144], Recipient [1:138:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:27:42.993257Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-10-24T16:27:42.993294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-10-24T16:27:42.993387Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-10-24T16:27:42.993393Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-10-24T16:27:42.993398Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-10-24T16:27:42.993445Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-10-24T16:27:42.993452Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-10-24T16:27:42.993460Z node 
1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:199:2159] in generation 2 2025-10-24T16:27:43.029467Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-10-24T16:27:43.033139Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-10-24T16:27:43.033192Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-10-24T16:27:43.033211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-10-24T16:27:43.033215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-10-24T16:27:43.033218Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-10-24T16:27:43.033222Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:43.033256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.033260Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.033309Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-10-24T16:27:43.033329Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-10-24T16:27:43.033337Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:43.033342Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-10-24T16:27:43.033347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-10-24T16:27:43.033350Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-10-24T16:27:43.033353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-10-24T16:27:43.033357Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-10-24T16:27:43.033360Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:27:43.033367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:216:2214], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.033371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.033375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:214:2213], serverId# [1:216:2214], sessionId# [0:0:0] 2025-10-24T16:27:43.033696Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:105:2138], Recipient [1:138:2159]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 105 RawX2: 4294969434 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 
\"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-10-24T16:27:43.033701Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-10-24T16:27:43.033708Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-10-24T16:27:43.033731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-10-24T16:27:43.033737Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-10-24T16:27:43.033744Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-10-24T16:27:43.033749Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-10-24T16:27:43.033751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-10-24T16:27:43.033755Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-10-24T16:27:43.033758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:43.033799Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-10-24T16:27:43.033802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-10-24T16:27:43.033804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit FinishPropose 2025-10-24T16:27:43.033807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:43.033812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:1] at 9437184 is DelayComplete 2025-10-24T16:27:43.033815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-10-24T16:27:43.033817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-10-24T16:27:43.033819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:43.033822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-10-24T16:27:43.044439Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-10-24T16:27:43.044461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-10-24T16:27:43.044466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-10-24T16:27:43.044475Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-10-24T16:27:43.044486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 
not sending time cast registration request in state WaitScheme 2025-10-24T16:27:43.044562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:138:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.044568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:27:43.044574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-10-24T16:27:43.044587Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287424, Sender [1:105:2138], Recipient [1:138:2159]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-10-24T16:27:43.044591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-10-24T16:27:43.044617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1806: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-10-24T16:27:43.044623Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1821: Execution status for [1000001:1] at 9437184 is Executed 2025-10-24T16:27:43.044629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-10-24T16:27:43.044633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-10-24T16:27:43.046654Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 105 RawX2: 4294969434 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-10-24T16:27:43.046670Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:27:43.046715Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:138:2159], Recipient [1:138:2159]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.046720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-10-24T16:27:43.046726Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-10-24T16:27:43.046733Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-10-24T16:27:43.046736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-10-24T16:27:43.046743Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-10-24T16:27:43.046746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [100 ... 
7185 dest 9437184 consumer 9437184 txId 521 2025-10-24T16:29:12.141548Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-10-24T16:29:12.141552Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141556Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2025-10-24T16:29:12.141577Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-10-24T16:29:12.141581Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141585Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-10-24T16:29:12.141599Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-10-24T16:29:12.141603Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141607Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-10-24T16:29:12.141632Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-10-24T16:29:12.141635Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141639Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-10-24T16:29:12.141658Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-10-24T16:29:12.141662Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141666Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-10-24T16:29:12.141684Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-10-24T16:29:12.141688Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141693Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack 
at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-10-24T16:29:12.141718Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-10-24T16:29:12.141722Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141726Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-10-24T16:29:12.141741Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-10-24T16:29:12.141744Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141749Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-10-24T16:29:12.141791Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-10-24T16:29:12.141795Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141799Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-10-24T16:29:12.141818Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-10-24T16:29:12.141822Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141826Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-10-24T16:29:12.141850Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-10-24T16:29:12.141854Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141857Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-10-24T16:29:12.141880Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-10-24T16:29:12.141884Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141888Z node 10 :TX_DATASHARD DEBUG: 
datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-10-24T16:29:12.141911Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-10-24T16:29:12.141916Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141920Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-10-24T16:29:12.141945Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-10-24T16:29:12.141949Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141953Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-10-24T16:29:12.141976Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-10-24T16:29:12.141980Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.141983Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-10-24T16:29:12.142004Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-10-24T16:29:12.142008Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.142012Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-10-24T16:29:12.161604Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:29:12.161638Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-10-24T16:29:12.161667Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [10:104:2138], exec latency: 1 ms, propose latency: 2 ms 2025-10-24T16:29:12.161687Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-10-24T16:29:12.161696Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:29:12.161825Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-10-24T16:29:12.161830Z node 10 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-10-24T16:29:12.161838Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [10:104:2138], exec latency: 0 ms, propose latency: 1 ms 2025-10-24T16:29:12.161844Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-10-24T16:29:12.161898Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [10:240:2232], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-10-24T16:29:12.161907Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-10-24T16:29:12.161914Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 31 30 31 27 29 23 24 20 30 28 29 31 24 31 11 28 24 31 31 31 31 29 - 20 24 24 - - - 24 - - actual 31 30 31 27 29 23 24 20 30 28 29 31 24 31 11 28 24 31 31 31 31 29 - 20 24 24 - - - 24 - - interm 27 30 27 27 29 23 24 20 30 28 29 28 24 28 11 28 24 28 28 28 28 29 - 20 24 24 - - - 24 - - >> TSchemeShardSplitBySample::EntryOrderDoesNotCount [GOOD] >> TSchemeShardSplitBySample::EdgeSelectionWithSameSizeKeys [GOOD] >> TSchemeShardSplitBySample::HistogramNoResultWhenMedianKeyIsAtBoundary [GOOD] >> TSchemeShardSplitBySample::Histogram [GOOD] >> TSchemeShardSplitBySizeTest::Merge111Shards >> TSchemeShardSplitBySizeTest::SplitShardsWithDecimalKey >> TSchemeShardSplitBySample::FlatlistNoResultWhenMedianKeyIsAtBoundary [GOOD] >> TSchemeShardSplitBySample::FlatList [GOOD] |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |62.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |62.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::ConcurrentSplitOneToOne [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:29:12.976678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:12.976708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:12.976719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:12.976728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing 
config: using default configuration 2025-10-24T16:29:12.976735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:12.976739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:12.976749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:12.976773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:12.976903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:12.976973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:12.993353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:29:12.993386Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:12.996809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:12.996850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:12.996916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:13.000610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:13.000891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:13.001050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:13.001199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:13.002475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:13.002532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:13.002826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:13.002837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:13.002880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:13.002890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:13.002896Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:13.002932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.004570Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:29:13.026407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:13.026520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.026606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:13.026615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:13.026674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:13.026690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:13.027904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:13.027960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:13.028025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.028036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:13.028042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:13.028048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:13.028624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.028642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:13.028648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:13.029071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.029083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.029101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:13.029109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:13.029786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:13.030228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:13.030281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:13.030505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:13.030526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:13.030533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:13.030610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:29:13.030618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:13.030672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:13.030685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:29:13.031084Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:13.031108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... SchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-10-24T16:29:13.706039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: OperationCookie: 102 TabletId: 72075186233409546 2025-10-24T16:29:13.706055Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 102:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:13.706073Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:13.706079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:13.706084Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-10-24T16:29:13.706088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:13.706093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-10-24T16:29:13.706100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-10-24T16:29:13.706106Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-10-24T16:29:13.706111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-10-24T16:29:13.706152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-10-24T16:29:13.709896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-10-24T16:29:13.709915Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 102:0 Leader for TabletID 72057594046678944 is [2:471:2421] sender: [2:643:2058] recipient: [2:15:2062] 2025-10-24T16:29:13.710328Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5943: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 310 RawX2: 8589936888 } TabletId: 72075186233409546 State: 4 2025-10-24T16:29:13.710351Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-10-24T16:29:13.710774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-10-24T16:29:13.710796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-10-24T16:29:13.710896Z node 2 :HIVE INFO: 
tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-10-24T16:29:13.711562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6358: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:13.711635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-10-24T16:29:13.712589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-10-24T16:29:13.712605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 TestWaitNotification wait txId: 102 2025-10-24T16:29:13.724163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-10-24T16:29:13.724194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-10-24T16:29:13.724213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-10-24T16:29:13.724218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-10-24T16:29:13.724363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-10-24T16:29:13.724402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-10-24T16:29:13.724408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:653:2575] 2025-10-24T16:29:13.724419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-10-24T16:29:13.724448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-10-24T16:29:13.724453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:653:2575] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 wait until 72075186233409546 is deleted 2025-10-24T16:29:13.724524Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 Deleted tabletId 72075186233409546 2025-10-24T16:29:13.724612Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:13.724703Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/Table" took 112us result status StatusSuccess 2025-10-24T16:29:13.725011Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnNames: "Value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |62.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |62.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TSchemeShardSplitBySample::NoResultOnEmptySample [GOOD] >> TSchemeShardSplitBySample::Mixed [GOOD] |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::EdgeSelectionWithSameSizeKeys [GOOD] |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::Histogram [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::FlatList [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySample::Mixed [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [GOOD] >> TSchemeShardSplitBySizeTest::Test [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> KqpWorkloadServiceActors::TestPoolFetcher >> TRestoreWithRebootsTests::ShouldFailOnInvalidValue[Zstd] [GOOD] >> TRestoreWithRebootsTests::ShouldFailOnOutboundKey[Raw] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_split_merge/unittest >> TSchemeShardSplitBySizeTest::Test [GOOD] >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream >> 
TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> TestKinesisHttpProxy::TestPing >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |63.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] Test command err: 2025-10-24T16:28:37.416893Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.416985Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.417030Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.417071Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.417106Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.417141Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.419259Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.420225Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.424795Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.424954Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425011Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425067Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425118Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425172Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425298Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle 
NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425333Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.425370Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425463Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425485Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425515Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425536Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425555Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425656Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425696Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425800Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425884Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425908Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425924Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425941Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.425959Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.426150Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.426213Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.426245Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.426270Z node 5 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.426300Z node 6 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.426324Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.426355Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.426397Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:607: Handle NActors::TEvInterconnect::TEvListNodes 2025-10-24T16:28:37.426577Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.427734Z node 1 :NAMESERVICE DEBUG: 
dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.428021Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.428046Z node 4 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.428080Z node 7 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.428099Z node 8 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.443838Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.444929Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.445197Z node 3 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.445327Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.445365Z node 2 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.445944Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.446039Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.446114Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.446174Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.446550Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:636: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-10-24T16:28:37.466831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:37.466864Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded ... 
waiting for nameservers are connected 2025-10-24T16:28:37.471610Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-10-24T16:28:37.472103Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-10-24T16:28:37.472191Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:20: TTxInitScheme Execute 2025-10-24T16:28:37.472406Z node 1 :NODE_BROKER DEBUG: node_broker_impl.h:239: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-10-24T16:28:37.473481Z node 1 :NODE_BROKER DEBUG: node_broker__init_scheme.cpp:29: TTxInitScheme Complete 2025-10-24T16:28:37.473827Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:19: TTxLoadState Execute 2025-10-24T16:28:37.473933Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:968: [DB] Using default config. 2025-10-24T16:28:37.473959Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1005: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:37.473965Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1031: [DB] Loaded the first approximate epoch start: #1.1 2025-10-24T16:28:37.473984Z node 1 :NODE_BROKER DEBUG: node_broker__load_state.cpp:27: TTxLoadState Complete 2025-10-24T16:28:37.474024Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:84: TTxMigrateState Execute 2025-10-24T16:28:37.474030Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:52: TTxMigrateState ProcessMigrationBatch UpdateNodes left 0, NewVersionUpdateNodes left 0 2025-10-24T16:28:37.474034Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:21: TTxMigrateState FinalizeMigration 2025-10-24T16:28:37.474041Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1318: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:28:37.474060Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:1337: [DB] Update approx epoch start in database: #1.1 2025-10-24T16:28:37.474067Z node 1 :NODE_BROKER NOTICE: node_broker.cpp:1350: [DB] Update main nodes table to: Nodes 2025-10-24T16:28:37.518515Z node 1 :NODE_BROKER DEBUG: node_broker__migrate_state.cpp:95: TTxMigrateState Complete 2025-10-24T16:28:37.518570Z node 1 :NODE_BROKER TRACE: node_broker.cpp:456: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-10-24T16:28:37.518596Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:562: Preparing nodes list cache for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z, approximate epoch start #1.1 nodes=0 expired=0 2025-10-24T16:28:37.518609Z node 1 :NODE_BROKER DEBUG: node_broker.cpp:603: Preparing update nodes log for epoch ##1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z nodes=0 expired=0 removed=0 2025-10-24T16:28:37.518774Z node 1 :NAMESERVICE DEBUG: dynamic_nameserver.cpp:717: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:584:2204] Leader: 1 Dead: 0 Generation: 2 VersionInfo:  } 2025-10-24T16:28:37.518799Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:584:2204], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:28:37.519492Z node 1 :N ... 
025-10-24T16:29:15.764821Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.780242Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.780283Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.780306Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5555:7167] 2025-10-24T16:29:15.784627Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9676:11222], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.784721Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.784729Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.784746Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.798771Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.798814Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.798833Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5560:7172] 2025-10-24T16:29:15.803759Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9678:11224], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.803975Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.803985Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.804001Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.817296Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.817335Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.817353Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5565:7177] 2025-10-24T16:29:15.822040Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9680:11226], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.822136Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], 
Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.822144Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.822161Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.838316Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.838358Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.838375Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5570:7182] 2025-10-24T16:29:15.853440Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9682:11228], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.853541Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.853555Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.853573Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.868553Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.868596Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.868616Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5575:7187] 2025-10-24T16:29:15.873462Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9684:11230], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.873552Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.873561Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.873578Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.886756Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.886794Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.886812Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5580:7192] 2025-10-24T16:29:15.891934Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender 
[1:9686:11232], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.892014Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.892022Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.892040Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.906813Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.906855Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.906872Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5585:7197] 2025-10-24T16:29:15.912039Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9688:11234], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.912150Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.912159Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.912177Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.925781Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.925822Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.925842Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5590:7202] 2025-10-24T16:29:15.931716Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9690:11236], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.931803Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.931809Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.931823Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.944849Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.944887Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.944905Z node 1 
:NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5595:7207] 2025-10-24T16:29:15.949796Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 269877761, Sender [1:9692:11238], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:15.949881Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 272039936, Sender [1:604:2216], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-10-24T16:29:15.949891Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:249: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-10-24T16:29:15.949908Z node 1 :NODE_BROKER TRACE: node_broker.cpp:423: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-10-24T16:29:15.962816Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:245: StateWork, received event# 2146435074, Sender [0:0:0], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.962850Z node 1 :NODE_BROKER TRACE: node_broker_impl.h:261: StateWork, processing event TEvPrivate::TEvProcessSubscribersQueue 2025-10-24T16:29:15.962873Z node 1 :NODE_BROKER TRACE: node_broker.cpp:730: Send TEvUpdateNodes v2 -> v3 to [1:5600:7212] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |63.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> TRestoreWithRebootsTests::CancelShouldSucceed[Raw] [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TImportWithRebootsTests::CancelShouldSucceedOnSingleTableWithUniqueIndex [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TRestoreWithRebootsTests::CancelShouldSucceed[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-10-24T16:28:57.494474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:57.494501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:57.494506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:57.494514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:57.494521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:57.494526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:57.494536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:57.494564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:57.494682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:57.494751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:57.506667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-10-24T16:28:57.506687Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:57.509185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:57.509209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:57.509229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:57.510675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:57.510902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:57.511013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.511083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:57.512081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:57.512119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:57.512350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:57.512359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-10-24T16:28:57.512395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:57.512403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:57.512409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:57.512433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.513743Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:246:2058] recipient: [1:15:2062] 2025-10-24T16:28:57.530806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:57.530892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.530954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:57.530962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:57.531024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:57.531036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:57.531734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.531777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:57.531836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.531860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:57.531867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards 
to create, do next state 2025-10-24T16:28:57.531871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:57.532245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.532256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:57.532262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:57.532609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.532616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:57.532620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:57.532625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:57.533263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:57.533802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:57.533868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:57.534078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:57.534118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:28:57.534127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:57.534205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-10-24T16:28:57.534215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:57.534272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:28:57.534287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-10-24T16:28:57.534849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:57.534859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme ... ard.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-10-24T16:29:18.198347Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-10-24T16:29:18.198354Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-10-24T16:29:18.198370Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-10-24T16:29:18.198495Z node 68 :DATASHARD_RESTORE DEBUG: import_s3.cpp:587: [Import] [s3:1003] Handle NKikimr::NWrappers::NExternalStorage::TEvHeadObjectResponse { Key: null Result: HeadObjectResult { ETag: a3ed28bfb53c9214f635c51ed6b618c4 ContentLength: 14 } } 2025-10-24T16:29:18.198580Z node 68 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: (empty maybe) ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:18.199105Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.199117Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:324: TRestore TAborting, opId: 1003:0 ProgressState at tablet72057594046678944 2025-10-24T16:29:18.199125Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_backup_restore_common.h:351: TRestore Abort, on datashard: 72075186233409546, opId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.199278Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-10-24T16:29:18.199540Z node 68 :DATASHARD_RESTORE DEBUG: import_s3.cpp:658: [Import] [s3:1003] Handle NKikimr::TEvDataShard::TEvS3DownloadInfo { Info: { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } } 2025-10-24T16:29:18.199552Z node 68 :DATASHARD_RESTORE NOTICE: import_s3.cpp:673: [Import] [s3:1003] Process download info at 'DownloadInfo': info# { DataETag: a3ed28bfb53c9214f635c51ed6b618c4 ProcessedBytes: 0 WrittenBytes: 0 WrittenRows: 0 ProcessedChecksumState: DownloadState: } 2025-10-24T16:29:18.199566Z node 68 :DATASHARD_RESTORE DEBUG: import_s3.cpp:577: [Import] [s3:1003] GetObject: key# /data_00.csv, range# 0-13 2025-10-24T16:29:18.199883Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 
1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269551625 REQUEST: GET /data_00.csv HTTP/1.1 HEADERS: Host: localhost:31889 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2CB96B23-3785-4DC5-A635-6B8021656325 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-13 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /data_00.csv / 14 2025-10-24T16:29:18.212436Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6705: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 6 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 249 } } 2025-10-24T16:29:18.212462Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:18.212496Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 6 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 249 } } 2025-10-24T16:29:18.212512Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:109: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: ERROR Error { Kind: WRONG_SHARD_STATE Reason: "Interrupted Restore operation [5000004:1003] while waiting to finish at 72075186233409546" } TxId: 1003 ExecLatency: 6 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 249 } } 2025-10-24T16:29:18.212671Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 337 RawX2: 292057778451 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:29:18.212677Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-10-24T16:29:18.212693Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 337 RawX2: 292057778451 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:29:18.212705Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:233: TRestore TAborting, opId: 1003:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 337 RawX2: 
292057778451 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 OpResult { Success: false Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-10-24T16:29:18.212720Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Aborting, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:18.212725Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.212730Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-10-24T16:29:18.212736Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 133 -> 240 2025-10-24T16:29:18.212775Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:116: Unable to make a bill: kind# TRestore, opId# 1003:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:18.213277Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.213463Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.213532Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.213540Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-10-24T16:29:18.213555Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-10-24T16:29:18.213559Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-10-24T16:29:18.213564Z node 68 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-10-24T16:29:18.213567Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-10-24T16:29:18.213571Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-10-24T16:29:18.213577Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-10-24T16:29:18.213583Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-10-24T16:29:18.213587Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1003:0 2025-10-24T16:29:18.213611Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestWaitNotification wait txId: 1003 2025-10-24T16:29:18.214234Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-10-24T16:29:18.214244Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 TestWaitNotification wait txId: 1004 2025-10-24T16:29:18.214259Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-10-24T16:29:18.214262Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-10-24T16:29:18.214327Z node 68 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-10-24T16:29:18.214349Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:29:18.214354Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [68:490:2461] 2025-10-24T16:29:18.214368Z node 68 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-10-24T16:29:18.214381Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-10-24T16:29:18.214385Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [68:490:2461] TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 >> KqpLimits::QueryExecTimeout [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_restore/unittest >> TImportWithRebootsTests::CancelShouldSucceedOnSingleTableWithUniqueIndex [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:28:59.756972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:59.756992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.756996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:59.757003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:59.757008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:59.757011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:59.757020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:59.757031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:59.757117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:59.757169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:59.775645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:28:59.775671Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:59.775754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:28:59.778043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:59.778120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:59.778138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:59.780656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:59.780721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:59.780800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.780864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:59.781634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.781674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:59.781946Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:59.781956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:59.781978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:59.781985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:59.781991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:59.782007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:28:59.783161Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:28:59.802201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:59.802258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.802294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:59.802299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:59.802323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:59.802332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:59.803365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.803397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:59.803455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.803463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:59.803470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:59.803475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:59.804666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.804679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:59.804685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:59.805092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.805101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:59.805104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:59.805108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:59.805513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:59.806159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:59.806183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:59.806310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:59.806331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1000, ready parts: 0/1, is published: true 2025-10-24T16:29:18.598909Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1000:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1000 msg type: 269090816 2025-10-24T16:29:18.598943Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1000, partId: 4294967295, tablet: 72057594046316545 2025-10-24T16:29:18.598991Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 FAKE_COORDINATOR: Add transaction: 1000 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1000 at step: 5000002 2025-10-24T16:29:18.599072Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:18.599095Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1000 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 296352745584 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:18.599104Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 1000:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-10-24T16:29:18.599134Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1000:0 128 -> 240 2025-10-24T16:29:18.599162Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-10-24T16:29:18.599172Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:29:18.599222Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 FAKE_COORDINATOR: Erasing txId 1000 2025-10-24T16:29:18.599656Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:18.599664Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1000, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:18.599696Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1000, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-10-24T16:29:18.599710Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:18.599715Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [69:212:2212], at schemeshard: 72057594046678944, txId: 1000, path id: 1 
2025-10-24T16:29:18.599720Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [69:212:2212], at schemeshard: 72057594046678944, txId: 1000, path id: 2 2025-10-24T16:29:18.599772Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1000:0, at schemeshard: 72057594046678944 2025-10-24T16:29:18.599780Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1000:0 ProgressState 2025-10-24T16:29:18.599796Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1000:0 progress is 1/1 2025-10-24T16:29:18.599800Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-10-24T16:29:18.599806Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1000:0 progress is 1/1 2025-10-24T16:29:18.599810Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-10-24T16:29:18.599827Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1000, ready parts: 1/1, is published: false 2025-10-24T16:29:18.599834Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1000 ready parts: 1/1 2025-10-24T16:29:18.599839Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1000:0 2025-10-24T16:29:18.599844Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1000:0 2025-10-24T16:29:18.599870Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-10-24T16:29:18.599876Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1000, publications: 2, subscribers: 0 2025-10-24T16:29:18.599881Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1000, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-10-24T16:29:18.599885Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1000, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-10-24T16:29:18.599989Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 1000 2025-10-24T16:29:18.600000Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 1000 2025-10-24T16:29:18.600005Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1000 2025-10-24T16:29:18.600013Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1000, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 
2025-10-24T16:29:18.600018Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-10-24T16:29:18.600149Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1000 2025-10-24T16:29:18.600161Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1000 2025-10-24T16:29:18.600165Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1000 2025-10-24T16:29:18.600170Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1000, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-10-24T16:29:18.600174Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-10-24T16:29:18.600182Z node 69 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1000, subscribers: 0 2025-10-24T16:29:18.601593Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 2025-10-24T16:29:18.601656Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1000 TestModificationResult got TxId: 1000, wait until txId: 1000 2025-10-24T16:29:18.601836Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:130: TImport::TTxCreate: DoExecute 2025-10-24T16:29:18.601873Z node 69 :IMPORT TRACE: schemeshard_import__create.cpp:131: Message: TxId: 1002 DatabaseName: "/MyRoot" Request { ImportFromS3Settings { endpoint: "localhost:3326" scheme: HTTP items { source_prefix: "" destination_path: "/MyRoot/Table" } } } 2025-10-24T16:29:18.601942Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:254: TImport::TTxCreate: Reply: status# SUCCESS, error# 2025-10-24T16:29:18.601964Z node 69 :IMPORT TRACE: schemeshard_import__create.cpp:255: Message: TxId: 1002 Response { Entry { Id: 1002 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:3326" scheme: HTTP items { source_prefix: "" destination_path: "/MyRoot/Table" } } StartTime { } } } 2025-10-24T16:29:18.603773Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:238: TImport::TTxCreate: DoComplete 2025-10-24T16:29:18.603817Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:372: TImport::TTxProgress: DoExecute 2025-10-24T16:29:18.603824Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:880: TImport::TTxProgress: Resume: id# 1002, itemIdx# (empty maybe) 2025-10-24T16:29:18.603843Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:906: TImport::TTxProgress: Resume: info# { Id: 1002 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] UserSID: '(empty maybe)' State: Cancelled Issue: 'Cancelled 
manually' Items: 1 }, item# { Idx: 0 DstPathName: '/MyRoot/Table' DstPathId: State: GetScheme SubState: AllocateTxId WaitTxId: 0 SrcPath: SrcPrefix: Issue: '' } 2025-10-24T16:29:18.603969Z node 69 :IMPORT DEBUG: schemeshard_import__create.cpp:396: TImport::TTxProgress: DoComplete TestWaitNotification wait txId: 1002 2025-10-24T16:29:18.604015Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-10-24T16:29:18.604022Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-10-24T16:29:18.604075Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:83: NotifyTxCompletion import in-flight, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:29:18.604080Z node 69 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:91: NotifyTxCompletion, import is ready to notify, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:29:18.604095Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-10-24T16:29:18.604101Z node 69 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [69:319:2308] TestWaitNotification: OK eventTxId 1002 >> TReplicationWithRebootsTests::Create [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> TestKinesisHttpProxy::TestRequestBadJson ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 6264, MsgBus: 29715 2025-10-24T16:27:59.706650Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564825885094708486:2073];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:27:59.706668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002071/r3tmp/tmpZ6ZFha/pdisk_1.dat 2025-10-24T16:27:59.768818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:27:59.778403Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6264, node 1 2025-10-24T16:27:59.787038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:27:59.787052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:27:59.787054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:27:59.787090Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29715 2025-10-24T16:27:59.808833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:27:59.808867Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:27:59.809939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:27:59.843747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:27:59.856212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.876232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.893799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:27:59.903967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:00.034810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-10-24T16:28:00.096610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825889389677378:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.096643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.096749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825889389677388:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.096760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.159345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.166289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.179131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.186149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.193337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.207701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.221841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.236669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:00.256670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7564825889389678250:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.256717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825889389678255:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.256723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.256823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564825889389678257:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.256848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:00.257671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 7205759404664 ... lient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-10-24T16:28:13.484823Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:28:13.491843Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:28:13.500270Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:13.531884Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-10-24T16:28:13.542214Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 
2025-10-24T16:28:13.583379Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:13.599914Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) waiting... 2025-10-24T16:28:13.911748Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825942727562585:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:13.911775Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:13.911931Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825942727562595:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:13.911941Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:13.945176Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.010776Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.031939Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.049546Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.065990Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.086375Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.100733Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.124317Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:686) 2025-10-24T16:28:14.154242Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[5:7564825947022530757:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:14.154271Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:14.154403Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825947022530763:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:14.154420Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7564825947022530762:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:14.154429Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:28:14.155554Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:28:14.161165Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7564825947022530766:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-10-24T16:28:14.229959Z node 5 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [5:7564825947022530818:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:28:14.240379Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-10-24T16:28:18.240274Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7564825942727561032:2097];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:28:18.240343Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-10-24T16:28:28.288066Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7727: Cannot get console configs 2025-10-24T16:28:28.292035Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=MWE2YTlmZmMtM2YxZGNmMzAtNTFjMTYzOGItMTM5Mjg5OA== >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:29:04.187054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:29:04.187080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:04.187085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:29:04.187093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:29:04.187100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:29:04.187105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:29:04.187114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:29:04.187129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:29:04.187283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:29:04.187348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:29:04.206531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:29:04.206561Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:04.206648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:29:04.212393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:29:04.212483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:29:04.212525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:29:04.214856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:29:04.214956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:29:04.215053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.215116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:29:04.217101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:04.217167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:29:04.217478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:29:04.217495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:29:04.217519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:29:04.217528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:29:04.217535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:29:04.217557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:29:04.219121Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: 
[1:15:2062] 2025-10-24T16:29:04.236833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:29:04.236914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.236963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:29:04.236970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:29:04.237006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:29:04.237021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:04.237822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.237884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:29:04.237950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.237962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:29:04.237968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:29:04.237974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:29:04.238443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.238456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:29:04.238462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:29:04.238912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.238928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:29:04.238935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:29:04.238943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:29:04.239559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:04.240160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:29:04.240219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:29:04.240420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:29:04.240446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
ation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-10-24T16:29:19.593506Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [55:213:2213], Recipient [55:129:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-10-24T16:29:19.593522Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-10-24T16:29:19.593547Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:29:19.593564Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:29:19.593570Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:29:19.593577Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-10-24T16:29:19.593583Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-10-24T16:29:19.593607Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:29:19.593992Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [55:213:2213], Recipient [55:129:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 2025-10-24T16:29:19.594005Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-10-24T16:29:19.594019Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:29:19.594033Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-10-24T16:29:19.594040Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-10-24T16:29:19.594045Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-10-24T16:29:19.594051Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 3 2025-10-24T16:29:19.594068Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-10-24T16:29:19.594074Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-10-24T16:29:19.594541Z node 55 :TX_PROXY INFO: describe.cpp:354: Actor# [55:404:2368] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2025-10-24T16:29:19.594702Z node 55 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-10-24T16:29:19.594715Z node 55 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2025-10-24T16:29:19.594773Z node 55 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409546] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-10-24T16:29:19.594794Z node 55 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409546][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-10-24T16:29:19.594805Z node 55 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409546][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2025-10-24T16:29:19.595178Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:29:19.595415Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-10-24T16:29:19.595424Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:29:19.595710Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-10-24T16:29:19.595721Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-10-24T16:29:19.596022Z node 55 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409546][TxDiscoveryTargetsResult] Complete TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-10-24T16:29:19.596092Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-10-24T16:29:19.596102Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-10-24T16:29:19.596170Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [55:421:2385], Recipient [55:129:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-10-24T16:29:19.596178Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-10-24T16:29:19.596183Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046678944 2025-10-24T16:29:19.596214Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124996, Sender [55:418:2382], Recipient [55:129:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1002 2025-10-24T16:29:19.596219Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5243: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-10-24T16:29:19.596230Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-10-24T16:29:19.596250Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-10-24T16:29:19.596256Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [55:419:2383] 2025-10-24T16:29:19.596282Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [55:421:2385], Recipient [55:129:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:19.596287Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:19.596291Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2025-10-24T16:29:19.596349Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [55:422:2386], Recipient [55:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:29:19.596354Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:29:19.596365Z node 55 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:19.596450Z node 55 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 82us result status StatusSuccess 2025-10-24T16:29:19.596587Z node 55 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] >> TestKinesisHttpProxy::DifferentContentTypes |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-10-24T16:29:16.612261Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826211825830511:2153];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:16.612302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:29:16.622896Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564826211717083924:2083];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:16.623059Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:29:16.632022Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/0020b7/r3tmp/tmpvziy0o/pdisk_1.dat 2025-10-24T16:29:16.636211Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:29:16.640705Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-10-24T16:29:16.696056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:16.703036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:16.703056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:16.704781Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-10-24T16:29:16.705168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:16.713803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:16.713828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:16.717489Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:16.717142Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25703, node 1 2025-10-24T16:29:16.722133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:16.743794Z INFO: TTestServer started on Port 22030 GrpcPort 25703 2025-10-24T16:29:16.744178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/fxx3/0020b7/r3tmp/yandexOKfdOk.tmp 2025-10-24T16:29:16.744192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/fxx3/0020b7/r3tmp/yandexOKfdOk.tmp 2025-10-24T16:29:16.744262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/fxx3/0020b7/r3tmp/yandexOKfdOk.tmp 2025-10-24T16:29:16.744287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22030 PQClient connected to localhost:25703 === TenantModeEnabled() = 1 === Init PQ - start server on port 25703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-10-24T16:29:16.792926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-10-24T16:29:16.792993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-10-24T16:29:16.793069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-10-24T16:29:16.793075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-10-24T16:29:16.793140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-10-24T16:29:16.793152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-10-24T16:29:16.794073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-10-24T16:29:16.794136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-10-24T16:29:16.794184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-10-24T16:29:16.794190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-10-24T16:29:16.794192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-10-24T16:29:16.794194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 2 -> 3 2025-10-24T16:29:16.794747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-10-24T16:29:16.794758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-10-24T16:29:16.794761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 3 -> 128 2025-10-24T16:29:16.795143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-10-24T16:29:16.795151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-10-24T16:29:16.795154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-10-24T16:29:16.795160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-10-24T16:29:16.795752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:29:16.795971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-10-24T16:29:16.795978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-10-24T16:29:16.795982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-10-24T16:29:16.796164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-10-24T16:29:16.796216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-10-24T16:29:16.796796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1761323356842, transactions count in step: 1, at schemeshard: 72057594046644480 2025-10-24T16:29:16.796828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1761323356842 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-10-24T16:29:16.796832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-10-24T16:29:16.796906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 128 -> 240 2025-10-24T16:29:16.796914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at ta ... tion 0 offset 2 partNo 0 count 1 size 431 2025-10-24T16:29:19.936422Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. 
Partition 0 offset 2 count 1 size 431 actorID [3:7564826225646142869:2379] 2025-10-24T16:29:19.936454Z node 3 :PERSQUEUE DEBUG: partition.cpp:2168: [72075186224037893][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-10-24T16:29:19.936478Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037893][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-10-24T16:29:19.936491Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037893][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-10-24T16:29:19.936507Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037893][Partition][0][StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-10-24T16:29:19.936544Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:19.936548Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:19.936550Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037893][Partition][0][StateIdle] Try persist 2025-10-24T16:29:19.936566Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2025-10-24T16:29:19.936579Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-10-24T16:29:19.936611Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-10-24T16:29:19.937166Z :DEBUG: [] MessageGroupId [123] SessionId [123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 11 } 2025-10-24T16:29:19.937178Z :DEBUG: [] MessageGroupId [123] SessionId [123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0] Write session: acknoledged message 1 2025-10-24T16:29:19.937284Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 431 2025-10-24T16:29:20.013987Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.014006Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.014009Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.027775Z :INFO: [] MessageGroupId [123] SessionId [123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0] Write session will now close 2025-10-24T16:29:20.027801Z :DEBUG: [] MessageGroupId [123] SessionId [123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0] Write session: aborting 2025-10-24T16:29:20.027995Z :INFO: [] MessageGroupId [123] SessionId [123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0] Write session: gracefully shut down, all writes complete 2025-10-24T16:29:20.028007Z :DEBUG: [] MessageGroupId [123] SessionId [123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0] Write session: destroy 2025-10-24T16:29:20.028319Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.028335Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.028339Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037893][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.028368Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.028370Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.028372Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.028774Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: 123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0 grpc read done: success: 0 data: 2025-10-24T16:29:20.028781Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: 123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0 grpc read failed 2025-10-24T16:29:20.028790Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: 123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0 grpc closed 2025-10-24T16:29:20.028792Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: 123|9e47833e-101ec04d-ad2e6bb4-ebc86092_0 is DEAD 2025-10-24T16:29:20.029026Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-10-24T16:29:20.035942Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2747: [PQ: 72075186224037893] server disconnected, pipe [3:7564826225646143160:2404] destroyed 2025-10-24T16:29:20.035972Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037893][Partition][0][StateIdle] TPartition::DropOwner. 
2025-10-24T16:29:20.035983Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.035988Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.035991Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037893][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.115947Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.115969Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.115972Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.132166Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.132180Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.132186Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037893][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.132207Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.132209Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.132211Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.169201Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [3:7564826229941110475:2411], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-10-24T16:29:20.170376Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2500: SessionId: ydb://session/3?node_id=3&id=ZDY4OTRlZmYtZDlkNDk1YjktOWViOTllMzgtNjg5YTY5ZTg=, ActorId: [3:7564826229941110468:2407], ActorState: ExecuteState, TraceId: 01k8bgv7wz7nr5ae56k8maptqe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-10-24T16:29:20.170537Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-10-24T16:29:20.219264Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.219281Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.219284Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.230674Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.230689Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.230692Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037893][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.230714Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.230716Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.230718Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.319630Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.319646Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.319649Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037891][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.331322Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.331343Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.331346Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: 
[72075186224037893][Partition][0][StateIdle] Try persist 2025-10-24T16:29:20.331377Z node 3 :PERSQUEUE DEBUG: partition.cpp:2341: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-10-24T16:29:20.331379Z node 3 :PERSQUEUE DEBUG: partition.cpp:2350: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-10-24T16:29:20.331386Z node 3 :PERSQUEUE DEBUG: partition.cpp:2373: [72075186224037889][Partition][0][StateIdle] Try persist >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |63.0%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [GOOD] >> TxUsage::WriteToTopic_Demo_46_Query [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] Test command err: 2025-10-24T16:29:15.699574Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7564826208033392064:2180];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:15.699608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:29:15.732353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002479/r3tmp/tmpg4PAGl/pdisk_1.dat 2025-10-24T16:29:15.846303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-10-24T16:29:15.846344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-10-24T16:29:15.847205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-10-24T16:29:15.859069Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:29:15.864195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7564826208033391912:2081] 1761323355692759 != 1761323355692762 2025-10-24T16:29:15.871942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 5147, node 1 2025-10-24T16:29:15.888099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-10-24T16:29:15.888114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-10-24T16:29:15.888116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-10-24T16:29:15.888164Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-10-24T16:29:15.943032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:29:15.946150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-10-24T16:29:16.233208Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-10-24T16:29:16.235930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7564826212328359855:2313], Start check tables existence, number paths: 2 2025-10-24T16:29:16.236394Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:225: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ== (tmp dir name: 81a1765d-43d5-50c7-31fa-c7bde166b922) 2025-10-24T16:29:16.236506Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:229: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [1:7564826212328359859:2317], ActorState: unknown state, session actor bootstrapped 2025-10-24T16:29:16.236519Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-10-24T16:29:16.236522Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: 
[WorkloadService] [Service] Resource pools was enanbled 2025-10-24T16:29:16.237756Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7564826212328359855:2313], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-10-24T16:29:16.237768Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7564826212328359855:2313], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-10-24T16:29:16.237773Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7564826212328359855:2313], Successfully finished 2025-10-24T16:29:16.237784Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-10-24T16:29:16.238166Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-10-24T16:29:16.244255Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564826212328359884:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-10-24T16:29:16.245287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-10-24T16:29:16.246192Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564826212328359884:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-10-24T16:29:16.246517Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564826212328359884:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-10-24T16:29:16.247820Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564826212328359884:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-10-24T16:29:16.299792Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564826212328359884:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-10-24T16:29:16.301074Z node 1 :TX_PROXY ERROR: schemereq.cpp:590: Actor# [1:7564826212328359935:2332] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-10-24T16:29:16.301119Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7564826212328359884:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-10-24T16:29:16.304008Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564826212328359942:2338], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-10-24T16:29:16.304394Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7564826212328359942:2338], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-10-24T16:29:16.308008Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2714: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [1:7564826212328359859:2317], ActorState: ReadyState, Session closed due to explicit close event 2025-10-24T16:29:16.308040Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2876: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [1:7564826212328359859:2317], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-10-24T16:29:16.308043Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2937: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [1:7564826212328359859:2317], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-10-24T16:29:16.308048Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2949: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [1:7564826212328359859:2317], ActorState: unknown state, Cleanup temp tables: 0 2025-10-24T16:29:16.308070Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3040: SessionId: ydb://session/3?node_id=1&id=Yjc1MDVkZWMtODBjYTE5NGYtNjhmNGUwM2QtYjZhYmJjNQ==, ActorId: [1:7564826212328359859:2317], ActorState: unknown state, Session actor destroyed test_client.cpp: SetPath # /home/runner/.ya/build/build_root/fxx3/002479/r3tmp/tmpDyuSn7/pdisk_1.dat 2025-10-24T16:29:16.559427Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7564826213527280581:2068];send_to=[0:7307199536658146131:7762515]; 2025-10-24T16:29:16.559452Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-10-24T16:29:16.575490Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-10-24T16:29:16.576912Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12615, node 2 2025-10-24T16:29:16.581296Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7564826213527280544:2081] 1761323356559029 != 1761323356559032 2025-10-24T16:29:16.585791Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is ... orState: ReadyState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, received request, proxyRequestId: 5 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: ALTER RESOURCE POOL default SET ( QUERY_MEMORY_LIMIT_PERCENT_PER_NODE=1 ); rpcActor: [6:7564826229873521856:2375] database: Root databaseId: /Root pool id: default 2025-10-24T16:29:20.986779Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:266: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ReadyState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, request placed into pool from cache: default 2025-10-24T16:29:20.986800Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:632: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, Sending CompileQuery request 2025-10-24T16:29:21.021163Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) 2025-10-24T16:29:21.023260Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7564826229873521822:2327], DatabaseId: /Root, PoolId: default, Got watch notification 2025-10-24T16:29:21.023291Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:476: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7564826229873521822:2327], DatabaseId: /Root, PoolId: default, Pool config has changed, queue size: -1, in flight limit: -1 2025-10-24T16:29:21.023342Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2043: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-10-24T16:29:21.023391Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2307: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, txInfo Status: Committed Kind: Pure TotalDuration: 3.176 ServerDuration: 3.137 QueriesCount: 2 2025-10-24T16:29:21.023432Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2466: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, Create QueryResponse for action: QUERY_ACTION_EXECUTE with 
SUCCESS status 2025-10-24T16:29:21.023477Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2876: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-10-24T16:29:21.023480Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2937: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, EndCleanup, isFinal: 1 2025-10-24T16:29:21.023492Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2669: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: ExecuteState, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, Sent query response back to proxy, proxyRequestId: 5, proxyId: [6:7564826229873521310:2264] 2025-10-24T16:29:21.023495Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2949: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: unknown state, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, Cleanup temp tables: 0 2025-10-24T16:29:21.023515Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3040: SessionId: ydb://session/3?node_id=6&id=YzZiMDc5ZjMtYzI5OGFjMDQtNTIwNjgxOWYtZTM3MjBlNTQ=, ActorId: [6:7564826229873521857:2334], ActorState: unknown state, TraceId: 01k8bgv8pt9e4r7bnne4bp8pbb, Session actor destroyed 2025-10-24T16:29:21.024746Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:225: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM= (tmp dir name: 81b0b220-43cc-8f5f-1f79-0d908139e669) 2025-10-24T16:29:21.024844Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:229: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: unknown state, session actor bootstrapped 2025-10-24T16:29:21.024874Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:445: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ReadyState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL default; rpcActor: [6:7564826234168489177:2395] database: Root databaseId: /Root pool id: default 2025-10-24T16:29:21.024877Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:266: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ReadyState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, request placed into pool from cache: default 2025-10-24T16:29:21.024895Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:632: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, Sending CompileQuery request 2025-10-24T16:29:21.065651Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] 
[TPoolHandlerActorBase] ActorId: [6:7564826229873521822:2327], DatabaseId: /Root, PoolId: default, Got delete notification 2025-10-24T16:29:21.065710Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2043: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-10-24T16:29:21.065759Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2307: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, txInfo Status: Committed Kind: Pure TotalDuration: 3.22 ServerDuration: 3.191 QueriesCount: 2 2025-10-24T16:29:21.065791Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2466: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-10-24T16:29:21.065834Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2876: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-10-24T16:29:21.065837Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2937: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, EndCleanup, isFinal: 1 2025-10-24T16:29:21.065850Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2669: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: ExecuteState, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, Sent query response back to proxy, proxyRequestId: 6, proxyId: [6:7564826229873521310:2264] 2025-10-24T16:29:21.065853Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2949: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: unknown state, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, Cleanup temp tables: 0 2025-10-24T16:29:21.065872Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3040: SessionId: ydb://session/3?node_id=6&id=M2EyYTU2YmYtN2U5Mjk3MDUtMTExYWZiOGUtMzViMDM3ZmM=, ActorId: [6:7564826234168489178:2336], ActorState: unknown state, TraceId: 01k8bgv8r0bd9rhbxggqjg4b9c, Session actor destroyed 2025-10-24T16:29:21.066119Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-10-24T16:29:21.066130Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-10-24T16:29:21.066135Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7564826234168489202:2338], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-10-24T16:29:21.066502Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[6:7564826234168489202:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:29:21.066528Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-10-24T16:29:21.068370Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2714: SessionId: ydb://session/3?node_id=6&id=MzNlNGRhZWYtN2E1MDcyYzgtZTE0NTVkYS00MWJmMGZmOQ==, ActorId: [6:7564826229873521692:2314], ActorState: ReadyState, Session closed due to explicit close event 2025-10-24T16:29:21.068397Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2876: SessionId: ydb://session/3?node_id=6&id=MzNlNGRhZWYtN2E1MDcyYzgtZTE0NTVkYS00MWJmMGZmOQ==, ActorId: [6:7564826229873521692:2314], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-10-24T16:29:21.068400Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2937: SessionId: ydb://session/3?node_id=6&id=MzNlNGRhZWYtN2E1MDcyYzgtZTE0NTVkYS00MWJmMGZmOQ==, ActorId: [6:7564826229873521692:2314], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-10-24T16:29:21.068405Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2949: SessionId: ydb://session/3?node_id=6&id=MzNlNGRhZWYtN2E1MDcyYzgtZTE0NTVkYS00MWJmMGZmOQ==, ActorId: [6:7564826229873521692:2314], ActorState: unknown state, Cleanup temp tables: 0 2025-10-24T16:29:21.068434Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3040: SessionId: ydb://session/3?node_id=6&id=MzNlNGRhZWYtN2E1MDcyYzgtZTE0NTVkYS00MWJmMGZmOQ==, ActorId: [6:7564826229873521692:2314], ActorState: unknown state, Session actor destroyed >> TestKinesisHttpProxy::MissingAction >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [GOOD] >> TxUsage::WriteToTopic_Demo_47_Table >> TestYmqHttpProxy::TestGetQueueUrl >> TBackupCollectionTests::DisallowedPath >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:129:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:137:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:141:2058] recipient: [1:115:2146] 2025-10-24T16:28:44.240186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-10-24T16:28:44.240212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:44.240217Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-10-24T16:28:44.240226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-10-24T16:28:44.240234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-10-24T16:28:44.240239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-10-24T16:28:44.240249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-10-24T16:28:44.240262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-10-24T16:28:44.240418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-10-24T16:28:44.240486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-10-24T16:28:44.259990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-10-24T16:28:44.260028Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-10-24T16:28:44.260135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:135:2158] sender: [1:178:2058] recipient: [1:15:2062] 2025-10-24T16:28:44.262953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-10-24T16:28:44.263032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-10-24T16:28:44.263074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-10-24T16:28:44.265762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-10-24T16:28:44.265893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-10-24T16:28:44.266021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:44.266112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-10-24T16:28:44.267068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:44.267112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-10-24T16:28:44.267372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-10-24T16:28:44.267385Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-10-24T16:28:44.267411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-10-24T16:28:44.267420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-10-24T16:28:44.267426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-10-24T16:28:44.267465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-10-24T16:28:44.269030Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-10-24T16:28:44.288173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-10-24T16:28:44.288281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:44.288356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-10-24T16:28:44.288364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-10-24T16:28:44.288409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-10-24T16:28:44.288424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-10-24T16:28:44.289577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:44.289626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-10-24T16:28:44.289694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:44.289705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-10-24T16:28:44.289711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-10-24T16:28:44.289717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-10-24T16:28:44.290222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:44.290240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-10-24T16:28:44.290246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-10-24T16:28:44.290680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:44.290692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-10-24T16:28:44.290698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-10-24T16:28:44.290706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-10-24T16:28:44.291392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-10-24T16:28:44.291917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-10-24T16:28:44.291973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:138:2160] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-10-24T16:28:44.292241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-10-24T16:28:44.292271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 Tab ... 
pipe is reset, at schemeshard: 72057594046678944 2025-10-24T16:29:20.750788Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-10-24T16:29:20.750791Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [130:613:2553] 2025-10-24T16:29:20.750805Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124996, Sender [130:612:2552], Recipient [130:139:2160]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1004 2025-10-24T16:29:20.750809Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5243: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-10-24T16:29:20.750814Z node 130 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-10-24T16:29:20.750825Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [130:618:2558], Recipient [130:139:2160]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:20.750828Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:20.750831Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 2025-10-24T16:29:20.750838Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-10-24T16:29:20.750842Z node 130 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [130:613:2553] 2025-10-24T16:29:20.750854Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [130:619:2559], Recipient [130:139:2160]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:20.750857Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-10-24T16:29:20.750860Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2025-10-24T16:29:20.750923Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [130:620:2560], Recipient [130:139:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:29:20.750928Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:29:20.750939Z node 130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:20.751000Z node 130 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication1" took 57us result status StatusSuccess 2025-10-24T16:29:20.751105Z node 
130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication1" PathDescription { Self { Name: "Replication1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication1" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:20.751230Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [130:621:2561], Recipient [130:139:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:29:20.751236Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:29:20.751246Z node 130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:20.751270Z node 130 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication2" took 23us result status StatusSuccess 2025-10-24T16:29:20.751319Z node 130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication2" PathDescription { Self { Name: "Replication2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication2" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 ControllerId: 72075186233409547 State { StandBy { } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-10-24T16:29:20.751461Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [130:622:2562], Recipient [130:139:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-10-24T16:29:20.751468Z node 130 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-10-24T16:29:20.751477Z node 130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-10-24T16:29:20.751502Z node 130 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication3" took 23us result status StatusSuccess 2025-10-24T16:29:20.751552Z node 130 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication3" PathDescription { Self { Name: "Replication3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication3" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 ControllerId: 72075186233409548 State { StandBy { } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TestYmqHttpProxy::TestSendMessage